removed natural language code · phamking/create-fastapi-project@2a9c9d4 · GitHub

Commit 2a9c9d4

committed
removed natural language code
1 parent 364604a commit 2a9c9d4

File tree: 4 files changed, 0 additions & 159 deletions


create_fastapi_project/templates/full/backend/app/app/api/celery_task.py

Lines changed: 0 additions & 44 deletions
@@ -6,50 +6,6 @@
 from app.models.hero_model import Hero
 from app.db.session import SessionLocal
 from asyncer import runnify
-import logging
-from celery import Task
-from transformers import pipeline
-
-
-class PredictTransformersPipelineTask(Task):
-    """
-    Abstraction of Celery's Task class to support loading transformers model.
-    """
-
-    task_name = ""
-    model_name = ""
-    abstract = True
-
-    def __init__(self):
-        super().__init__()
-        self.pipeline = None
-
-    def __call__(self, *args, **kwargs):
-        """
-        Load pipeline on first call (i.e. first task processed)
-        Avoids the need to load pipeline on each task request
-        """
-        if not self.pipeline:
-            logging.info("Loading pipeline...")
-            self.pipeline = pipeline(self.task_name, model=self.model_name)
-            logging.info("Pipeline loaded")
-        return self.run(*args, **kwargs)
-
-
-@celery.task(
-    ignore_result=False,
-    bind=True,
-    base=PredictTransformersPipelineTask,
-    task_name="text-generation",
-    model_name="gpt2",
-    name="tasks.predict_transformers_pipeline",
-)
-def predict_transformers_pipeline(self, prompt: str):
-    """
-    Essentially the run method of PredictTask
-    """
-    result = self.pipeline(prompt)
-    return result
 
 
 @celery.task(name="tasks.increment")
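The deleted task used Celery's base-class pattern: overriding Task.__call__ so the worker loads the Hugging Face pipeline once per process, on the first task it executes, rather than on every request, and relying on the fact that extra keyword arguments on the task decorator become class attributes. A minimal, self-contained sketch of that pattern; the Celery app, broker URL, and the generate_text task below are illustrative assumptions, not part of this template:

import logging

from celery import Celery, Task
from transformers import pipeline

# Placeholder broker/backend URLs; a real project would wire these from its settings.
celery_app = Celery("worker", broker="redis://localhost:6379/0", backend="redis://localhost:6379/0")


class LazyPipelineTask(Task):
    """Base task that loads a transformers pipeline on first use and caches it on the worker."""

    abstract = True
    task_name = ""   # transformers task name, e.g. "text-generation"
    model_name = ""  # Hugging Face model id, e.g. "gpt2"

    def __init__(self):
        super().__init__()
        self._pipeline = None

    def __call__(self, *args, **kwargs):
        # Load the model only for the first task executed by this worker process.
        if self._pipeline is None:
            logging.info("Loading pipeline %s / %s ...", self.task_name, self.model_name)
            self._pipeline = pipeline(self.task_name, model=self.model_name)
            logging.info("Pipeline loaded")
        return self.run(*args, **kwargs)


@celery_app.task(
    bind=True,
    base=LazyPipelineTask,
    task_name="text-generation",
    model_name="gpt2",
    name="tasks.generate_text",
)
def generate_text(self, prompt: str):
    # self._pipeline is set by LazyPipelineTask.__call__ before run() is invoked.
    return self._pipeline(prompt)

Calling generate_text.delay("Hello") from the API then pays the model-loading cost only once per worker, which is why the removed code kept inference out of the request path.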

create_fastapi_project/templates/full/backend/app/app/api/v1/api.py

Lines changed: 0 additions & 4 deletions
@@ -1,6 +1,5 @@
 from fastapi import APIRouter
 from app.api.v1.endpoints import (
-    natural_language,
     user,
     hero,
     team,
@@ -23,9 +22,6 @@
 api_router.include_router(cache.router, prefix="/cache", tags=["cache"])
 api_router.include_router(weather.router, prefix="/weather", tags=["weather"])
 api_router.include_router(report.router, prefix="/report", tags=["report"])
-api_router.include_router(
-    natural_language.router, prefix="/natural_language", tags=["natural_language"]
-)
 api_router.include_router(
     periodic_tasks.router, prefix="/periodic_tasks", tags=["periodic_tasks"]
 )
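The removed lines un-register the /natural_language routes; the remaining calls follow FastAPI's usual composition pattern, where each feature lives in its own APIRouter module and is mounted onto the versioned router with include_router. A small sketch of that pattern with a hypothetical feature module (the status module and read_status handler are illustrative, not from this repo):

from fastapi import APIRouter

# endpoints/status.py (hypothetical feature module)
status_router = APIRouter()


@status_router.get("/")
async def read_status() -> dict[str, str]:
    return {"status": "ok"}


# api.py: mount the feature router under a prefix; FastAPI then serves GET /status/
api_router = APIRouter()
api_router.include_router(status_router, prefix="/status", tags=["status"])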

create_fastapi_project/templates/full/backend/app/app/api/v1/endpoints/natural_language.py

Lines changed: 0 additions & 101 deletions
This file was deleted.

create_fastapi_project/templates/full/backend/app/app/main.py

Lines changed: 0 additions & 10 deletions
@@ -76,21 +76,11 @@ async def lifespan(app: FastAPI):
     FastAPICache.init(RedisBackend(redis_client), prefix="fastapi-cache")
     await FastAPILimiter.init(redis_client, identifier=user_id_identifier)
 
-    # Load a pre-trained sentiment analysis model as a dictionary to an easy cleanup
-    models: dict[str, Any] = {
-        "sentiment_model": pipeline(
-            "sentiment-analysis",
-            model="distilbert-base-uncased-finetuned-sst-2-english",
-        ),
-    }
-    g.set_default("sentiment_model", models["sentiment_model"])
     print("startup fastapi")
     yield
     # shutdown
     await FastAPICache.clear()
     await FastAPILimiter.close()
-    models.clear()
-    g.cleanup()
     gc.collect()
 
 
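The removed lines loaded a sentiment-analysis pipeline during startup and released it at shutdown, all inside the lifespan context manager. A minimal sketch of that startup/shutdown pattern, using plain app.state instead of the project's g globals helper (that substitution and the standalone FastAPI app are assumptions made to keep the sketch self-contained, with transformers installed):

import gc
from contextlib import asynccontextmanager
from typing import Any

from fastapi import FastAPI
from transformers import pipeline


@asynccontextmanager
async def lifespan(app: FastAPI):
    # startup: load the model once and keep a reference on application state
    models: dict[str, Any] = {
        "sentiment_model": pipeline(
            "sentiment-analysis",
            model="distilbert-base-uncased-finetuned-sst-2-english",
        ),
    }
    app.state.models = models
    yield
    # shutdown: drop references so the model's memory can be reclaimed
    models.clear()
    gc.collect()


app = FastAPI(lifespan=lifespan)

Route handlers can then read the loaded pipeline from request.app.state.models["sentiment_model"] without reloading it on every request.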
