From b44e6c5164437fcc07db9c6b2288dc6552577b54 Mon Sep 17 00:00:00 2001 From: abhi1992002 Date: Sat, 21 Dec 2024 22:27:06 +0530 Subject: [PATCH 1/2] small-changes --- .../frontend/src/app/agents/page.tsx | 15 +++ autogpt_platform/frontend/src/app/globals.css | 5 + autogpt_platform/frontend/src/app/layout.tsx | 16 ++- .../frontend/src/components/agptui/Button.tsx | 11 +- .../agptui/LibraryNotificationDropdown.tsx | 109 ++++++++++++++++++ .../components/agptui/LibrarySearchBar.tsx | 0 .../components/agptui/LibraryUploadAgent.tsx | 0 .../agptui/composite/LibraryActionHeader.tsx | 26 +++++ .../composite/LibraryAgentListContainer.tsx | 0 .../src/components/ui/dropdown-menu.tsx | 2 +- autogpt_platform/frontend/tailwind.config.ts | 1 + 11 files changed, 180 insertions(+), 5 deletions(-) create mode 100644 autogpt_platform/frontend/src/app/agents/page.tsx create mode 100644 autogpt_platform/frontend/src/components/agptui/LibraryNotificationDropdown.tsx create mode 100644 autogpt_platform/frontend/src/components/agptui/LibrarySearchBar.tsx create mode 100644 autogpt_platform/frontend/src/components/agptui/LibraryUploadAgent.tsx create mode 100644 autogpt_platform/frontend/src/components/agptui/composite/LibraryActionHeader.tsx create mode 100644 autogpt_platform/frontend/src/components/agptui/composite/LibraryAgentListContainer.tsx diff --git a/autogpt_platform/frontend/src/app/agents/page.tsx b/autogpt_platform/frontend/src/app/agents/page.tsx new file mode 100644 index 000000000000..590ed46e0ac2 --- /dev/null +++ b/autogpt_platform/frontend/src/app/agents/page.tsx @@ -0,0 +1,15 @@ +import LibraryActionHeader from "@/components/agptui/composite/LibraryActionHeader"; + +const LibraryPage = () => { + return ( +
+      {/* Top section - includes notification, search and uploading mechanism */}
+      <LibraryActionHeader />
+
+      {/* Last section for Agent Lists, Agent counter and filter */}
+    </div>
+
+ ); +}; + +export default LibraryPage; diff --git a/autogpt_platform/frontend/src/app/globals.css b/autogpt_platform/frontend/src/app/globals.css index c2998c08d07d..166682a27686 100644 --- a/autogpt_platform/frontend/src/app/globals.css +++ b/autogpt_platform/frontend/src/app/globals.css @@ -120,3 +120,8 @@ @apply shadow-sm focus-visible:shadow-md; } } + +.scroll-none { + scrollbar-width: thin; /* For Firefox (sets a thin scrollbar) */ + scrollbar-color: transparent transparent; /* For Firefox (thumb and track colors) */ +} diff --git a/autogpt_platform/frontend/src/app/layout.tsx b/autogpt_platform/frontend/src/app/layout.tsx index 944e2301d395..c4150c7958a6 100644 --- a/autogpt_platform/frontend/src/app/layout.tsx +++ b/autogpt_platform/frontend/src/app/layout.tsx @@ -1,6 +1,6 @@ import React from "react"; import type { Metadata } from "next"; -import { Inter } from "next/font/google"; +import { Inter, Poppins } from "next/font/google"; import { Providers } from "@/app/providers"; import { cn } from "@/lib/utils"; import { Navbar } from "@/components/agptui/Navbar"; @@ -13,6 +13,12 @@ import { IconType } from "@/components/ui/icons"; const inter = Inter({ subsets: ["latin"] }); +const poppins = Poppins({ + subsets: ["latin"], + weight: ["400", "500", "600", "700"], + variable: "--font-poppins", +}); + export const metadata: Metadata = { title: "NextGen AutoGPT", description: "Your one stop shop to creating AI Agents", @@ -25,7 +31,13 @@ export default async function RootLayout({ }>) { return ( - + ( diff --git a/autogpt_platform/frontend/src/components/agptui/LibraryNotificationDropdown.tsx b/autogpt_platform/frontend/src/components/agptui/LibraryNotificationDropdown.tsx new file mode 100644 index 000000000000..d97eb6539d3e --- /dev/null +++ b/autogpt_platform/frontend/src/components/agptui/LibraryNotificationDropdown.tsx @@ -0,0 +1,109 @@ +"use client"; +import { + DropdownMenu, + DropdownMenuContent, + DropdownMenuItem, + DropdownMenuLabel, + DropdownMenuSeparator, + DropdownMenuTrigger, +} from "@/components/ui/dropdown-menu"; +import { Button } from "./Button"; +import { BellIcon, X } from "lucide-react"; +import { motion, useAnimationControls } from "framer-motion"; +import { useState } from "react"; +import { + Card, + CardContent, + CardDescription, + CardFooter, + CardHeader, + CardTitle, +} from "../ui/card"; + +export const LibraryNotificationDropdown = () => { + const controls = useAnimationControls(); + const [open, setOpen] = useState(false); + + const handleHoverStart = () => { + controls.start({ + rotate: [0, -10, 10, -10, 10, 0], + transition: { duration: 0.5 }, + }); + }; + return ( + + + + + + + Agent run updates + + + + + + + + + + + + + + ); +}; + +const LibraryNotificationCard = () => { + return ( + + + Latest Agent Updates + View your latest workflow changes + + + +
+
+
+
+

Agent Run #1234

+

Updated 2 hours ago

+
+
+ +
+
+
+

Workflow Changes

+

3 new changes detected

+
+
+
+ + + + + + + ); +}; diff --git a/autogpt_platform/frontend/src/components/agptui/LibrarySearchBar.tsx b/autogpt_platform/frontend/src/components/agptui/LibrarySearchBar.tsx new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/autogpt_platform/frontend/src/components/agptui/LibraryUploadAgent.tsx b/autogpt_platform/frontend/src/components/agptui/LibraryUploadAgent.tsx new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/autogpt_platform/frontend/src/components/agptui/composite/LibraryActionHeader.tsx b/autogpt_platform/frontend/src/components/agptui/composite/LibraryActionHeader.tsx new file mode 100644 index 000000000000..bf0162cc1170 --- /dev/null +++ b/autogpt_platform/frontend/src/components/agptui/composite/LibraryActionHeader.tsx @@ -0,0 +1,26 @@ +import { LibraryNotificationDropdown } from "../LibraryNotificationDropdown"; + +const LibraryActionHeader: React.FC = () => { + return ( +
+ + + +
+ ); +}; + +const LibrarySearchBar = () => { + return ( +
+ SearchBar + {/* Search bar content */} +
+ ); +}; + +const LibraryUploadAgent = () => { + return
Uploading Agent
; +}; + +export default LibraryActionHeader; diff --git a/autogpt_platform/frontend/src/components/agptui/composite/LibraryAgentListContainer.tsx b/autogpt_platform/frontend/src/components/agptui/composite/LibraryAgentListContainer.tsx new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/autogpt_platform/frontend/src/components/ui/dropdown-menu.tsx b/autogpt_platform/frontend/src/components/ui/dropdown-menu.tsx index 6f245e8d12a5..cf415ecb3941 100644 --- a/autogpt_platform/frontend/src/components/ui/dropdown-menu.tsx +++ b/autogpt_platform/frontend/src/components/ui/dropdown-menu.tsx @@ -88,7 +88,7 @@ const DropdownMenuItem = React.forwardRef< Date: Fri, 27 Dec 2024 10:01:45 +0530 Subject: [PATCH 2/2] draft-1 --- .../backend/backend/server/v2/library/db.py | 127 +++++++++++- .../backend/server/v2/library/routes.py | 174 ++++++++++++----- autogpt_platform/frontend/next.config.mjs | 1 + autogpt_platform/frontend/package.json | 3 + .../frontend/src/app/agents/page.tsx | 31 ++- autogpt_platform/frontend/src/app/globals.css | 13 ++ autogpt_platform/frontend/src/app/layout.tsx | 12 +- .../frontend/src/app/monitoring/page.tsx | 2 +- .../frontend/src/components/agptui/Button.tsx | 4 +- .../components/agptui/LibraryAgentCard.tsx | 52 +++++ .../components/agptui/LibraryAgentFilter.tsx | 68 +++++++ .../agptui/LibraryNotificationCard.tsx | 183 ++++++++++++++++++ .../agptui/LibraryNotificationDropdown.tsx | 160 +++++++++------ .../components/agptui/LibrarySearchBar.tsx | 104 ++++++++++ .../components/agptui/LibraryUploadAgent.tsx | 168 ++++++++++++++++ .../frontend/src/components/agptui/Navbar.tsx | 2 +- .../agptui/composite/LibraryActionHeader.tsx | 148 ++++++++++++-- .../composite/LibraryAgentListContainer.tsx | 120 ++++++++++++ .../frontend/src/hooks/useThreshold.ts | 37 ++++ .../src/lib/autogpt-server-api/client.ts | 28 ++- .../src/lib/autogpt-server-api/types.ts | 1 + autogpt_platform/frontend/tailwind.config.ts | 1 + autogpt_platform/frontend/yarn.lock | 99 +++++++++- 23 files changed, 1387 insertions(+), 151 deletions(-) create mode 100644 autogpt_platform/frontend/src/components/agptui/LibraryAgentCard.tsx create mode 100644 autogpt_platform/frontend/src/components/agptui/LibraryAgentFilter.tsx create mode 100644 autogpt_platform/frontend/src/components/agptui/LibraryNotificationCard.tsx create mode 100644 autogpt_platform/frontend/src/hooks/useThreshold.ts diff --git a/autogpt_platform/backend/backend/server/v2/library/db.py b/autogpt_platform/backend/backend/server/v2/library/db.py index 8d142ef40c76..9acc609c4804 100644 --- a/autogpt_platform/backend/backend/server/v2/library/db.py +++ b/autogpt_platform/backend/backend/server/v2/library/db.py @@ -1,5 +1,6 @@ import logging from typing import List +import typing import prisma.errors import prisma.models @@ -15,17 +16,21 @@ async def get_library_agents( user_id: str, + limit: int = 20, + offset: int = 0, ) -> List[backend.server.v2.library.model.LibraryAgent]: """ - Returns all agents (AgentGraph) that belong to the user and all agents in their library (UserAgent table) + Returns paginated agents (AgentGraph) that belong to the user and agents in their library (UserAgent table) """ - logger.debug(f"Getting library agents for user {user_id}") + logger.debug(f"Getting library agents for user {user_id} with limit {limit} offset {offset}") try: # Get agents created by user with nodes and links user_created = await prisma.models.AgentGraph.prisma().find_many( where=prisma.types.AgentGraphWhereInput(userId=user_id, isActive=True), 
include=backend.data.includes.AGENT_GRAPH_INCLUDE, + skip=offset, + take=limit, ) # Get agents in user's library with nodes and links @@ -47,6 +52,8 @@ async def get_library_agents( } } }, + skip=offset, + take=limit, ) # Convert to Graph models first @@ -94,6 +101,122 @@ async def get_library_agents( "Failed to fetch library agents" ) from e +async def search_library_agents( + user_id: str, + search_term: str, + sort_by: typing.Literal["most_recent", "highest_runtime", "most_runs", "alphabetical", "last_modified"], + limit: int = 20, + offset: int = 0, +) -> List[backend.server.v2.library.model.LibraryAgent]: + """ + Searches paginated agents (AgentGraph) that belong to the user and agents in their library (UserAgent table) + based on name or description containing the search term + """ + logger.debug(f"Searching library agents for user {user_id} with term '{search_term}', limit {limit} offset {offset}") + + try: + # Get sort field + sort_order = "desc" if sort_by in ["most_recent", "highest_runtime", "most_runs", "last_modified"] else "asc" + + sort_field = { + "most_recent": "createdAt", + "last_modified": "updatedAt", + "highest_runtime": "totalRuntime", + "most_runs": "runCount", + "alphabetical": "name" + }.get(sort_by, "updatedAt") + + # Get user created agents matching search + user_created = await prisma.models.AgentGraph.prisma().find_many( + where=prisma.types.AgentGraphWhereInput( + userId=user_id, + isActive=True, + OR=[ + {"name": {"contains": search_term, "mode": "insensitive"}}, + {"description": {"contains": search_term, "mode": "insensitive"}} + ] + ), + include=backend.data.includes.AGENT_GRAPH_INCLUDE, + order={sort_field: sort_order}, + skip=offset, + take=limit, + ) + + # Get library agents matching search + library_agents = await prisma.models.UserAgent.prisma().find_many( + where=prisma.types.UserAgentWhereInput( + userId=user_id, + isDeleted=False, + isArchived=False, + Agent={ + "is": { + "OR": [ + {"name": {"contains": search_term, "mode": "insensitive"}}, + {"description": {"contains": search_term, "mode": "insensitive"}} + ] + } + } + ), + include={ + "Agent": { + "include": { + "AgentNodes": { + "include": { + "Input": True, + "Output": True, + "Webhook": True, + "AgentBlock": True, + } + } + } + } + }, + skip=offset, + take=limit, + ) + + # Convert to Graph models + graphs = [] + + for agent in user_created: + try: + graphs.append(backend.data.graph.GraphModel.from_db(agent)) + except Exception as e: + logger.error(f"Error processing searched user agent {agent.id}: {e}") + continue + + for agent in library_agents: + if agent.Agent: + try: + graphs.append(backend.data.graph.GraphModel.from_db(agent.Agent)) + except Exception as e: + logger.error(f"Error processing searched library agent {agent.agentId}: {e}") + continue + + # Convert to LibraryAgent models + result = [] + for graph in graphs: + result.append( + backend.server.v2.library.model.LibraryAgent( + id=graph.id, + version=graph.version, + is_active=graph.is_active, + name=graph.name, + description=graph.description, + isCreatedByUser=any(a.id == graph.id for a in user_created), + input_schema=graph.input_schema, + output_schema=graph.output_schema, + ) + ) + + logger.debug(f"Found {len(result)} library agents matching search") + return result + + except prisma.errors.PrismaError as e: + logger.error(f"Database error searching library agents: {str(e)}") + raise backend.server.v2.store.exceptions.DatabaseError( + "Failed to search library agents" + ) from e async def 
add_agent_to_library(store_listing_version_id: str, user_id: str) -> None: """ diff --git a/autogpt_platform/backend/backend/server/v2/library/routes.py b/autogpt_platform/backend/backend/server/v2/library/routes.py index 0c3b1a77ec93..2223673e7b08 100644 --- a/autogpt_platform/backend/backend/server/v2/library/routes.py +++ b/autogpt_platform/backend/backend/server/v2/library/routes.py @@ -4,7 +4,6 @@ import autogpt_libs.auth.depends import autogpt_libs.auth.middleware import fastapi -import prisma import backend.data.graph import backend.integrations.creds_manager @@ -28,14 +27,37 @@ async def get_library_agents( user_id: typing.Annotated[ str, fastapi.Depends(autogpt_libs.auth.depends.get_user_id) - ] -) -> typing.Sequence[backend.server.v2.library.model.LibraryAgent]: + ], + pagination_token: str | None = fastapi.Query(None) +) -> dict[str, typing.Any]: """ - Get all agents in the user's library, including both created and saved agents. + Get agents in the user's library with pagination (20 agents per page). + + Args: + user_id: ID of the authenticated user + pagination_token: Token to get next page of results + + Returns: + Dictionary containing: + - agents: List of agents for current page + - next_token: Token to get next page (None if no more pages) """ try: - agents = await backend.server.v2.library.db.get_library_agents(user_id) - return agents + page_size = 20 + agents = await backend.server.v2.library.db.get_library_agents( + user_id, + limit=page_size + 1, + offset=int(pagination_token) if pagination_token else 0 + ) + + has_more = len(agents) > page_size + agents = agents[:page_size] + next_token = str(int(pagination_token or 0) + page_size) if has_more else None + + return { + "agents": agents, + "next_token": next_token + } except Exception: logger.exception("Exception occurred whilst getting library agents") raise fastapi.HTTPException( @@ -43,6 +65,58 @@ async def get_library_agents( ) +# For searching and filtering the library agents +@router.get( + "/agents/search", + tags=["library", "private"], + dependencies=[fastapi.Depends(autogpt_libs.auth.middleware.auth_middleware)], +) +async def search_library_agents( + user_id: typing.Annotated[ + str, fastapi.Depends(autogpt_libs.auth.depends.get_user_id) + ], + search_term: str = fastapi.Query(..., description="Search term to filter agents"), + sort_by: typing.Literal["most_recent", "highest_runtime", "most_runs", "alphabetical", "last_modified"] = fastapi.Query("most_recent", description="Sort results by criteria"), + pagination_token: str | None = fastapi.Query(None) +) -> dict[str, typing.Any]: + """ + Search for agents in the user's library with pagination (20 agents per page). 
+ + Args: + user_id: ID of the authenticated user + search_term: Term to search for in agent names/descriptions + sort_by: How to sort results (most_recent, highest_runtime, most_runs, alphabetical, last_modified) + pagination_token: Token to get next page of results + + Returns: + Dictionary containing: + - agents: List of matching agents for current page + - next_token: Token to get next page (None if no more pages) + """ + try: + page_size = 20 + agents = await backend.server.v2.library.db.search_library_agents( + user_id, + search_term, + sort_by=sort_by, + limit=page_size + 1, + offset=int(pagination_token) if pagination_token else 0 + ) + + has_more = len(agents) > page_size + agents = agents[:page_size] + next_token = str(int(pagination_token or 0) + page_size) if has_more else None + + return { + "agents": agents, + "next_token": next_token + } + except Exception: + logger.exception("Exception occurred whilst searching library agents") + raise fastapi.HTTPException( + status_code=500, detail="Failed to search library agents" + ) + @router.post( "/agents/{store_listing_version_id}", tags=["library", "private"], @@ -70,49 +144,51 @@ async def add_agent_to_library( """ try: # Get the graph from the store listing - store_listing_version = ( - await prisma.models.StoreListingVersion.prisma().find_unique( - where={"id": store_listing_version_id}, include={"Agent": True} - ) - ) - - if not store_listing_version or not store_listing_version.Agent: - raise fastapi.HTTPException( - status_code=404, - detail=f"Store listing version {store_listing_version_id} not found", - ) - - agent = store_listing_version.Agent - - if agent.userId == user_id: - raise fastapi.HTTPException( - status_code=400, detail="Cannot add own agent to library" - ) - - # Create a new graph from the template - graph = await backend.data.graph.get_graph( - agent.id, agent.version, template=True, user_id=user_id - ) - - if not graph: - raise fastapi.HTTPException( - status_code=404, detail=f"Agent {agent.id} not found" - ) - - # Create a deep copy with new IDs - graph.version = 1 - graph.is_template = False - graph.is_active = True - graph.reassign_ids(user_id=user_id, reassign_graph_id=True) - - # Save the new graph - graph = await backend.data.graph.create_graph(graph, user_id=user_id) - graph = ( - await backend.integrations.webhooks.graph_lifecycle_hooks.on_graph_activate( - graph, - get_credentials=lambda id: integration_creds_manager.get(user_id, id), - ) - ) + # store_listing_version = ( + # await prisma.models.StoreListingVersion.prisma().find_unique( + # where={"id": store_listing_version_id}, include={"Agent": True} + # ) + # ) + + # if not store_listing_version or not store_listing_version.Agent: + # raise fastapi.HTTPException( + # status_code=404, + # detail=f"Store listing version {store_listing_version_id} not found", + # ) + + # agent = store_listing_version.Agent + + # if agent.userId == user_id: + # raise fastapi.HTTPException( + # status_code=400, detail="Cannot add own agent to library" + # ) + + # # Create a new graph from the template + # graph = await backend.data.graph.get_graph( + # agent.id, agent.version, template=True, user_id=user_id + # ) + + # if not graph: + # raise fastapi.HTTPException( + # status_code=404, detail=f"Agent {agent.id} not found" + # ) + + # # Create a deep copy with new IDs + # graph.version = 1 + # graph.is_template = False + # graph.is_active = True + # graph.reassign_ids(user_id=user_id, reassign_graph_id=True) + + # # Save the new graph + # graph = await 
backend.data.graph.create_graph(graph, user_id=user_id) + # graph = ( + # await backend.integrations.webhooks.graph_lifecycle_hooks.on_graph_activate( + # graph, + # get_credentials=lambda id: integration_creds_manager.get(user_id, id), + # ) + # ) + + await backend.server.v2.library.db.add_agent_to_library(store_listing_version_id=store_listing_version_id,user_id=user_id) return fastapi.Response(status_code=201) diff --git a/autogpt_platform/frontend/next.config.mjs b/autogpt_platform/frontend/next.config.mjs index 6700943d43e2..b7f69edfdb42 100644 --- a/autogpt_platform/frontend/next.config.mjs +++ b/autogpt_platform/frontend/next.config.mjs @@ -19,6 +19,7 @@ const nextConfig = { typescript: { ignoreBuildErrors: true, }, + transpilePackages: ["geist"], }; export default withSentryConfig(nextConfig, { diff --git a/autogpt_platform/frontend/package.json b/autogpt_platform/frontend/package.json index 1d3545133a7e..7387ab5cc092 100644 --- a/autogpt_platform/frontend/package.json +++ b/autogpt_platform/frontend/package.json @@ -62,6 +62,7 @@ "framer-motion": "^11.15.0", "geist": "^1.3.1", "launchdarkly-react-client-sdk": "^3.6.0", + "lodash.debounce": "^4.0.8", "lucide-react": "^0.468.0", "moment": "^2.30.1", "next": "^14.2.13", @@ -69,6 +70,7 @@ "react": "^18", "react-day-picker": "^9.4.4", "react-dom": "^18", + "react-drag-drop-files": "^2.4.0", "react-hook-form": "^7.54.0", "react-icons": "^5.4.0", "react-markdown": "^9.0.1", @@ -93,6 +95,7 @@ "@storybook/react": "^8.3.5", "@storybook/test": "^8.3.5", "@storybook/test-runner": "^0.20.1", + "@types/lodash": "^4.17.13", "@types/negotiator": "^0.6.3", "@types/node": "^22.9.0", "@types/react": "^18", diff --git a/autogpt_platform/frontend/src/app/agents/page.tsx b/autogpt_platform/frontend/src/app/agents/page.tsx index 590ed46e0ac2..9bc53434efbb 100644 --- a/autogpt_platform/frontend/src/app/agents/page.tsx +++ b/autogpt_platform/frontend/src/app/agents/page.tsx @@ -1,13 +1,34 @@ +"use client"; + import LibraryActionHeader from "@/components/agptui/composite/LibraryActionHeader"; +import LibraryAgentListContainer from "@/components/agptui/composite/LibraryAgentListContainer"; +import { GraphMeta } from "@/lib/autogpt-server-api"; +import { useState } from "react"; + +/** + * LibraryPage Component + * Main component that manages the library interface including agent listing and actions + */ const LibraryPage = () => { + const [agents, setAgents] = useState([]); + const [agentLoading, setAgentLoading] = useState(true); return ( -
-      {/* Top section - includes notification, search and uploading mechanism */}
-      <LibraryActionHeader />
+
+ {/* Header section containing notifications, search functionality, agent count, filters and upload mechanism */} + - {/* Last section for Agent Lists, Agent counter and filter */} -
+ {/* Content section displaying agent list with counter and filtering options */} +
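      {/*
        Illustrative sketch of consuming the paginated GET /library/agents response
        described earlier in this patch. Assumptions: the api object comes from
        useBackendAPI(), and listLibraryAgents accepts the previously returned
        next_token; the actual client.ts signature appears later in the diff.

        type LibraryAgentsPage = { agents: GraphMeta[]; next_token: string | null };

        const loadNextPage = async (token: string | null) => {
          // Pass the previous next_token back as pagination_token; omit it for the first page.
          const page: LibraryAgentsPage = await api.listLibraryAgents(token ?? undefined);
          setAgents((prev) => [...prev, ...page.agents]);
          return page.next_token; // null once the last page has been fetched
        };
      */}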
); }; diff --git a/autogpt_platform/frontend/src/app/globals.css b/autogpt_platform/frontend/src/app/globals.css index 166682a27686..eaa5fe00f1f0 100644 --- a/autogpt_platform/frontend/src/app/globals.css +++ b/autogpt_platform/frontend/src/app/globals.css @@ -125,3 +125,16 @@ scrollbar-width: thin; /* For Firefox (sets a thin scrollbar) */ scrollbar-color: transparent transparent; /* For Firefox (thumb and track colors) */ } + +body { + overflow-x: hidden; +} + +.drop-style { + border: dashed 2px #a3a3a3 !important; + border-radius: 20px; +} + +.drop-style:hover { + border: dashed 2px #525252 !important; +} diff --git a/autogpt_platform/frontend/src/app/layout.tsx b/autogpt_platform/frontend/src/app/layout.tsx index c4150c7958a6..7626dfbffd14 100644 --- a/autogpt_platform/frontend/src/app/layout.tsx +++ b/autogpt_platform/frontend/src/app/layout.tsx @@ -4,6 +4,8 @@ import { Inter, Poppins } from "next/font/google"; import { Providers } from "@/app/providers"; import { cn } from "@/lib/utils"; import { Navbar } from "@/components/agptui/Navbar"; +import { GeistSans } from "geist/font/sans"; +import { GeistMono } from "geist/font/mono"; import "./globals.css"; import TallyPopupSimple from "@/components/TallyPopup"; @@ -11,7 +13,7 @@ import { GoogleAnalytics } from "@next/third-parties/google"; import { Toaster } from "@/components/ui/toaster"; import { IconType } from "@/components/ui/icons"; -const inter = Inter({ subsets: ["latin"] }); +const inter = Inter({ subsets: ["latin"], variable: "--font-inter" }); const poppins = Poppins({ subsets: ["latin"], @@ -30,12 +32,14 @@ export default async function RootLayout({ children: React.ReactNode; }>) { return ( - + { const fetchAgents = useCallback(() => { api.listLibraryAgents().then((agent) => { - setFlows(agent); + setFlows(agent.agents); }); api.getExecutions().then((executions) => { setExecutions(executions); diff --git a/autogpt_platform/frontend/src/components/agptui/Button.tsx b/autogpt_platform/frontend/src/components/agptui/Button.tsx index 206ee4257022..d59a68c73acd 100644 --- a/autogpt_platform/frontend/src/components/agptui/Button.tsx +++ b/autogpt_platform/frontend/src/components/agptui/Button.tsx @@ -21,8 +21,8 @@ const buttonVariants = cva( "hover:bg-neutral-100 text-[#272727] dark:text-neutral-100 dark:hover:bg-neutral-700", link: "text-[#272727] underline-offset-4 hover:underline dark:text-neutral-100", library_outline: - "rounded-[52px] hover:bg-[#262626] border border-zinc-700 hover:text-white", - library_primary: "rounded-[52px] bg-[#262626] text-white", + "rounded-[52px] hover:bg-[#262626] border border-zinc-700 hover:text-white font-sans", + library_primary: "rounded-[52px] bg-[#262626] text-white font-sans", }, size: { default: diff --git a/autogpt_platform/frontend/src/components/agptui/LibraryAgentCard.tsx b/autogpt_platform/frontend/src/components/agptui/LibraryAgentCard.tsx new file mode 100644 index 000000000000..021df5ef1425 --- /dev/null +++ b/autogpt_platform/frontend/src/components/agptui/LibraryAgentCard.tsx @@ -0,0 +1,52 @@ +import { cn } from "@/lib/utils"; +import Link from "next/link"; +import { GraphMeta } from "@/lib/autogpt-server-api"; + +export const LibraryAgentCard = ({ id, name, isCreatedByUser }: GraphMeta) => { + return ( +
+
+

+ {name} +

+ {/* */} +
+ +
+
+ + See runs + + + + Open in builder + +
+ {/* {output && ( +
+ New output +
+ )} */} +
+
+ ); +}; diff --git a/autogpt_platform/frontend/src/components/agptui/LibraryAgentFilter.tsx b/autogpt_platform/frontend/src/components/agptui/LibraryAgentFilter.tsx new file mode 100644 index 000000000000..40986f75060e --- /dev/null +++ b/autogpt_platform/frontend/src/components/agptui/LibraryAgentFilter.tsx @@ -0,0 +1,68 @@ +import { GraphMeta } from "@/lib/autogpt-server-api"; +import { Dispatch, SetStateAction } from "react"; +import { + Select, + SelectContent, + SelectGroup, + SelectItem, + SelectTrigger, + SelectValue, +} from "../ui/select"; +import { Filter } from "lucide-react"; +import { useBackendAPI } from "@/lib/autogpt-server-api/context"; + +type SortValue = + | "most_recent" + | "highest_runtime" + | "most_runs" + | "alphabetical" + | "last_modified"; + +const LibraryAgentFilter = ({ + setAgents, + setAgentLoading, +}: { + setAgents: Dispatch>; + setAgentLoading: Dispatch>; +}) => { + const api = useBackendAPI(); + const handleSortChange = async (value: SortValue) => { + setAgentLoading(true); + await new Promise((resolve) => setTimeout(resolve, 1000)); + let response = await api.librarySearchAgent("", undefined, value); + setAgents(response.agents); + setAgentLoading(false); + }; + + return ( +
+ sort by + +
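      {/*
        Sketch of the client call made by handleSortChange above. The argument order
        (search term, pagination token, sort key) mirrors the call
        api.librarySearchAgent("", undefined, value) in this file; what each argument
        means, and the { agents } response shape, follow the /library/agents/search
        route defined earlier in this patch and are otherwise assumptions.

        const resorted = await api.librarySearchAgent(
          "",           // empty search term: match every agent in the library
          undefined,    // no pagination token: start from the first page
          "most_runs",  // any SortValue accepted by the backend route
        );
        setAgents(resorted.agents);
      */}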
+ ); +}; + +export default LibraryAgentFilter; diff --git a/autogpt_platform/frontend/src/components/agptui/LibraryNotificationCard.tsx b/autogpt_platform/frontend/src/components/agptui/LibraryNotificationCard.tsx new file mode 100644 index 000000000000..9575021992be --- /dev/null +++ b/autogpt_platform/frontend/src/components/agptui/LibraryNotificationCard.tsx @@ -0,0 +1,183 @@ +import { Button } from "../ui/button"; +import Image from "next/image"; +import { Separator } from "../ui/separator"; +import { + CirclePlayIcon, + ClipboardCopy, + ImageIcon, + Play, + PlayCircle, + Share2, + X, +} from "lucide-react"; +import { Dispatch, SetStateAction } from "react"; + +export interface NotificationCardData { + type: "text" | "image" | "video" | "audio"; + title: string; + id: string; + content?: string; + mediaUrl?: string; +} + +interface NotificationCardProps extends NotificationCardData { + setNotifications: Dispatch>; +} + +const NotificationCard = ({ + type, + title, + id, + content, + mediaUrl, + setNotifications, +}: NotificationCardProps) => { + const barHeights = Array.from({ length: 60 }, () => + Math.floor(Math.random() * (34 - 20 + 1) + 20), + ); + + const handleClose = (e: React.MouseEvent) => { + e.preventDefault(); + setNotifications((prev) => { + if (!prev) return null; + return prev.filter((notification) => notification.id !== id); + }); + }; + + return ( +
+
+ {/* count */} +
+

+ 1/4 +

+

+ Success +

+
+ + {/* cross icon */} + +
+ +
+

+ New Output Ready! +

+

+ {title} +

+ {type === "text" && } +
+ +
+ {type === "text" && ( + // Maybe in future we give markdown support +
+ {content} +
+ )} + + {type === "image" && + (mediaUrl ? ( +
+ {title} +
+ ) : ( +
+ +
+ ))} + + {type === "video" && ( +
+ {mediaUrl ? ( +
+ )} + + {type === "audio" && ( +
+ +
+ {/*
+ )} +
+ +
+
+ + +
+ +
+
+ ); +}; + +export default NotificationCard; diff --git a/autogpt_platform/frontend/src/components/agptui/LibraryNotificationDropdown.tsx b/autogpt_platform/frontend/src/components/agptui/LibraryNotificationDropdown.tsx index d97eb6539d3e..9aed7436c152 100644 --- a/autogpt_platform/frontend/src/components/agptui/LibraryNotificationDropdown.tsx +++ b/autogpt_platform/frontend/src/components/agptui/LibraryNotificationDropdown.tsx @@ -4,25 +4,63 @@ import { DropdownMenuContent, DropdownMenuItem, DropdownMenuLabel, - DropdownMenuSeparator, DropdownMenuTrigger, } from "@/components/ui/dropdown-menu"; import { Button } from "./Button"; import { BellIcon, X } from "lucide-react"; -import { motion, useAnimationControls } from "framer-motion"; -import { useState } from "react"; -import { - Card, - CardContent, - CardDescription, - CardFooter, - CardHeader, - CardTitle, -} from "../ui/card"; +import { motion, useAnimationControls, useScroll } from "framer-motion"; +import { useState, useEffect } from "react"; +import LibraryNotificationCard, { + NotificationCardData, +} from "./LibraryNotificationCard"; +import { cn } from "@/lib/utils"; export const LibraryNotificationDropdown = () => { const controls = useAnimationControls(); const [open, setOpen] = useState(false); + const [notifications, setNotifications] = useState< + NotificationCardData[] | null + >(null); + const { scrollY } = useScroll(); + const [scrollPosition, setScrollPosition] = useState(0); + + useEffect(() => { + const unsubscribe = scrollY.onChange((currentY) => { + setScrollPosition(currentY); + }); + return () => unsubscribe(); + }, [scrollY]); + + const initialNotificationData = [ + { + type: "audio" as "audio", + title: "Audio Processing Complete", + id: "4", + }, + { + type: "text" as "text", + title: "LinkedIn Post Generator: YouTube to Professional Content", + id: "1", + content: + "As artificial intelligence (AI) continues to evolve, it's increasingly clear that AI isn't just a trend—it's reshaping the way we work, innovate, and solve complex problems. However, for many professionals, the question remains: How can I leverage AI to drive meaningful results in my own field? In this article, we'll explore how AI can empower businesses and individuals alike to be more efficient, make better decisions, and unlock new opportunities. Whether you're in tech, finance, healthcare, or any other industry, understanding the potential of AI can set you apart.", + }, + { + type: "image" as "image", + title: "New Image Upload", + id: "2", + }, + { + type: "video" as "video", + title: "Video Processing Complete", + id: "3", + }, + ] as NotificationCardData[]; + + useEffect(() => { + if (initialNotificationData) { + setNotifications(initialNotificationData); + } + }, []); const handleHoverStart = () => { controls.start({ @@ -30,25 +68,49 @@ export const LibraryNotificationDropdown = () => { transition: { duration: 0.5 }, }); }; + return ( - + - - + + Agent run updates - - - - - - - - - - - - ); -}; - -const LibraryNotificationCard = () => { - return ( - - - Latest Agent Updates - View your latest workflow changes - - - -
-
-
-
-

Agent Run #1234

-

Updated 2 hours ago

-
-
- -
-
-
-

Workflow Changes

-

3 new changes detected

+
+ {notifications && notifications.length ? ( + notifications.map((notification) => ( + + + + )) + ) : ( +
+ No notifications present
-
+ )}
- - - - - - + + ); }; diff --git a/autogpt_platform/frontend/src/components/agptui/LibrarySearchBar.tsx b/autogpt_platform/frontend/src/components/agptui/LibrarySearchBar.tsx index e69de29bb2d1..52e98b95f3bd 100644 --- a/autogpt_platform/frontend/src/components/agptui/LibrarySearchBar.tsx +++ b/autogpt_platform/frontend/src/components/agptui/LibrarySearchBar.tsx @@ -0,0 +1,104 @@ +"use client"; +import { Search, X } from "lucide-react"; +import { Input } from "../ui/input"; +import { Dispatch, SetStateAction, useRef, useState, useCallback } from "react"; +import { motion, AnimatePresence } from "framer-motion"; +import { GraphMeta } from "@/lib/autogpt-server-api"; +import debounce from "lodash/debounce"; +import { useBackendAPI } from "@/lib/autogpt-server-api/context"; + +export const LibrarySearchBar = ({ + setAgents, + setAgentLoading, +}: { + setAgents: Dispatch>; + setAgentLoading: Dispatch>; +}) => { + const inputRef = useRef(null); + const [isFocused, setIsFocused] = useState(false); + const api = useBackendAPI(); + + const debouncedSearch = useCallback( + debounce(async (searchTerm: string) => { + try { + setAgentLoading(true); + await new Promise((resolve) => setTimeout(resolve, 1000)); + const response = await api.librarySearchAgent(searchTerm); + setAgents(response.agents); + setAgentLoading(false); + } catch (error) { + console.error("Search failed:", error); + } + }, 300), + [setAgents], + ); + + const handleSearchInput = (e: React.ChangeEvent) => { + const searchTerm = e.target.value; + debouncedSearch(searchTerm); + }; + + return ( +
inputRef.current?.focus()} + className="relative z-[21] mx-auto flex h-[50px] w-full max-w-[500px] flex-1 cursor-pointer items-center rounded-[45px] bg-[#EDEDED] px-[24px] py-[10px]" + > +
+ + {!isFocused ? ( + + + + ) : ( + + { + if (inputRef.current) { + debouncedSearch(""); + inputRef.current.value = ""; + inputRef.current.blur(); + e.preventDefault(); + } + setIsFocused(false); + }} + /> + + )} + +
+ + setIsFocused(true)} + onBlur={() => !inputRef.current?.value && setIsFocused(false)} + onChange={handleSearchInput} + className="border-none font-sans text-[16px] font-normal leading-7 shadow-none focus:shadow-none" + type="text" + placeholder="Search agents" + /> +
+ ); +}; diff --git a/autogpt_platform/frontend/src/components/agptui/LibraryUploadAgent.tsx b/autogpt_platform/frontend/src/components/agptui/LibraryUploadAgent.tsx index e69de29bb2d1..30bf3455b9a1 100644 --- a/autogpt_platform/frontend/src/components/agptui/LibraryUploadAgent.tsx +++ b/autogpt_platform/frontend/src/components/agptui/LibraryUploadAgent.tsx @@ -0,0 +1,168 @@ +"use client"; +import { Upload, X } from "lucide-react"; +import { Button } from "./Button"; +import { useEffect, useState } from "react"; +import { motion, useAnimation } from "framer-motion"; +import { cn } from "@/lib/utils"; +import { + Dialog, + DialogContent, + DialogDescription, + DialogHeader, + DialogTitle, + DialogTrigger, +} from "../ui/dialog"; +import { Input } from "../ui/input"; +import { FileUploader } from "react-drag-drop-files"; + +const fileTypes = ["JSON"]; + +export const LibraryUploadAgent = () => { + const [scrolled, setScrolled] = useState(false); + const [file, setFile] = useState(null); + const [isDroped, setisDroped] = useState(false); + const controls = useAnimation(); + const handleChange = (file: File) => { + setTimeout(() => { + setisDroped(false); + }, 2000); + setFile(file); + setisDroped(false); + }; + + useEffect(() => { + const handleScroll = () => { + if (window.scrollY > 30) { + setScrolled(true); + } else { + setScrolled(false); + } + }; + + window.addEventListener("scroll", handleScroll); + return () => window.removeEventListener("scroll", handleScroll); + }, []); + + const handleUpload = () => { + // Add upload logic here + if (file) { + console.log("Uploading file:", file); + } + }; + + return ( + + + + + + + Upload Agent + + +
+ + + + {file ? ( +
+ {file.name} + +
+ ) : ( + { + setisDroped(true); + }} + onSelect={() => { + setisDroped(true); + }} + children={ +
+ {isDroped ? ( +
+
+
+ ) : ( + <> + Drop your agent here + or + Click to upload + + )} +
+ } + /> + )} + + +
+
+
+ ); +}; diff --git a/autogpt_platform/frontend/src/components/agptui/Navbar.tsx b/autogpt_platform/frontend/src/components/agptui/Navbar.tsx index 33d3fbf42acd..b315dc3a768e 100644 --- a/autogpt_platform/frontend/src/components/agptui/Navbar.tsx +++ b/autogpt_platform/frontend/src/components/agptui/Navbar.tsx @@ -57,7 +57,7 @@ export const Navbar = async ({ links, menuItemGroups }: NavbarProps) => { return ( <> -