Merge pull request #32 from clowder-framework/increment-view-download
increment views on summary endpoint
max-zilla authored Dec 17, 2021
2 parents e150339 + 934413a commit 6b72ee8
Showing 2 changed files with 55 additions and 22 deletions.
42 changes: 24 additions & 18 deletions app/routers/datasets.py
@@ -19,6 +19,7 @@
 
 clowder_bucket = os.getenv("MINIO_BUCKET_NAME", "clowder")
 
+
 @router.post("", response_model=Dataset)
 async def save_dataset(
     dataset_info: Dataset,
@@ -67,33 +68,40 @@ async def get_dataset(dataset_id: str, db: MongoClient = Depends(dependencies.get_db)
         return Dataset.from_mongo(dataset)
     raise HTTPException(status_code=404, detail=f"Dataset {dataset_id} not found")
 
+
 @router.get("/{dataset_id}/files")
-async def get_dataset_files(dataset_id: str, db: MongoClient = Depends(dependencies.get_db)):
+async def get_dataset_files(
+    dataset_id: str, db: MongoClient = Depends(dependencies.get_db)
+):
     if (
         dataset := await db["datasets"].find_one({"_id": ObjectId(dataset_id)})
     ) is not None:
-        file_ids = dataset['files']
+        file_ids = dataset["files"]
         files = []
         for file_id in file_ids:
-            if (file := await db["files"].find_one({"_id": ObjectId(file_id)})) is not None:
+            if (
+                file := await db["files"].find_one({"_id": ObjectId(file_id)})
+            ) is not None:
                 files.append(ClowderFile.from_mongo(file))
         return files
     raise HTTPException(status_code=404, detail=f"Dataset {dataset_id} not found")
 
+
-@router.put("/{dataset_id}")
+@router.put("/{dataset_id}", response_model=Dataset)
 async def edit_dataset(
-    request: Request, dataset_id: str, db: MongoClient = Depends(dependencies.get_db)
+    dataset_id: str,
+    dataset_info: Dataset,
+    db: MongoClient = Depends(dependencies.get_db),
 ):
-    request_json = await request.json()
+    ds = dict(dataset_info) if dataset_info is not None else {}
     if (
         dataset := await db["datasets"].find_one({"_id": ObjectId(dataset_id)})
     ) is not None:
         try:
-            request_json["_id"] = dataset_id
-            request_json["modified"] = datetime.datetime.utcnow()
-            edited_dataset = Dataset.from_mongo(request_json)
-            db["datasets"].replace_one({"_id": ObjectId(dataset_id)}, edited_dataset)
+            dataset.update(ds)
+            dataset["_id"] = dataset_id
+            dataset["modified"] = datetime.datetime.utcnow()
+            db["datasets"].replace_one({"_id": ObjectId(dataset_id)}, dataset)
         except Exception as e:
             print(e)
         return Dataset.from_mongo(dataset)
@@ -102,18 +110,16 @@ async def edit_dataset(
 
 @router.delete("/{dataset_id}")
 async def delete_dataset(
-    dataset_id: str, db: MongoClient = Depends(dependencies.get_db),
-    fs: Minio = Depends(dependencies.get_fs)
+    dataset_id: str,
+    db: MongoClient = Depends(dependencies.get_db),
+    fs: Minio = Depends(dependencies.get_fs),
 ):
     if (
         dataset := await db["datasets"].find_one({"_id": ObjectId(dataset_id)})
     ) is not None:
-        dataset_files = dataset['files']
+        dataset_files = dataset["files"]
         for f in dataset_files:
-            fs.remove_object(
-                clowder_bucket,
-                str(f)
-            )
+            fs.remove_object(clowder_bucket, str(f))
         res = await db["datasets"].delete_one({"_id": ObjectId(dataset_id)})
-        return {"status": "deleted"}
+        return {"deleted": dataset_id}
     raise HTTPException(status_code=404, detail=f"Dataset {dataset_id} not found")
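
For context, a rough sketch of how a client might call the reworked PUT /datasets/{dataset_id} route after this change, which now takes a Dataset body rather than raw request JSON. The base URL, dataset id, and payload fields below are illustrative assumptions, not taken from the repository:

# Hypothetical client call; base URL, dataset id, and payload fields are assumptions.
import httpx

dataset_id = "0123456789abcdef01234567"  # placeholder 24-character ObjectId string
payload = {"name": "Renamed dataset", "description": "Updated description"}

# The route validates the body against the Dataset model, merges it into the
# stored document, and then calls replace_one().
resp = httpx.put(f"http://localhost:8000/datasets/{dataset_id}", json=payload)
print(resp.status_code, resp.json())
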
35 changes: 31 additions & 4 deletions app/routers/files.py
@@ -34,7 +34,7 @@ async def save_file(
     db: MongoClient = Depends(dependencies.get_db),
     fs: Minio = Depends(dependencies.get_fs),
     file: UploadFile = File(...),
-    file_info: Optional[Json[ClowderFile]] = None,
+    file_info: Optional[ClowderFile] = None,
 ):
     # First, add to database and get unique ID
     f = dict(file_info) if file_info is not None else {}
@@ -83,6 +83,10 @@ async def download_file(
         response.headers["Content-Disposition"] = (
             "attachment; filename=%s" % file["name"]
         )
+        # Increment download count
+        await db["files"].update_one(
+            {"_id": ObjectId(file_id)}, {"$inc": {"downloads": 1}}
+        )
         return response
 
 
@@ -94,12 +98,15 @@ async def delete_file(
     fs: Minio = Depends(dependencies.get_fs),
 ):
     if (file := await db["files"].find_one({"_id": ObjectId(file_id)})) is not None:
-        dataset = await db["datasets"].find_one({"files": ObjectId(file_id)})
-        if dataset is not None:
+        if (
+            dataset := await db["datasets"].find_one({"files": ObjectId(file_id)})
+        ) is not None:
             updated_dataset = await db["datasets"].update_one(
                 {"_id": ObjectId(dataset["id"])},
-                {"$push": {"files": ObjectId(file_id)}},
+                {"$pull": {"files": ObjectId(file_id)}},
             )
         # TODO: Error catching
         removed_file = await db["files"].delete_one({"_id": ObjectId(file_id)})
         fs.remove_object(settings.MINIO_BUCKET_NAME, str(file_id))
         return {"deleted": file_id}
     else:
@@ -111,5 +118,25 @@ async def get_file_summary(
     file_id: str, db: MongoClient = Depends(dependencies.get_db)
 ):
     if (file := await db["files"].find_one({"_id": ObjectId(file_id)})) is not None:
+        # TODO: Incrementing too often (3x per page view)
+        # file["views"] += 1
+        # db["files"].replace_one({"_id": ObjectId(file_id)}, file)
         return ClowderFile.from_mongo(file)
     raise HTTPException(status_code=404, detail=f"File {file_id} not found")
+
+
+@router.put("/{file_id}", response_model=ClowderFile)
+async def edit_file(
+    file_info: ClowderFile, file_id: str, db: MongoClient = Depends(dependencies.get_db)
+):
+    # TODO: Needs permissions checking here
+    if (file := await db["files"].find_one({"_id": ObjectId(file_id)})) is not None:
+        try:
+            file.update(file_info)
+            # TODO: Disallow changing other fields such as author
+            file["_id"] = file_id
+            db["files"].replace_one({"_id": ObjectId(file_id)}, file)
+        except Exception as e:
+            print(e)
+        return ClowderFile.from_mongo(file)
+    raise HTTPException(status_code=404, detail=f"File {file_id} not found")

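The view counter in get_file_summary stays commented out because the UI currently hits the summary endpoint several times per page view. Once that is resolved, a minimal sketch of doing the increment atomically, mirroring the $inc used for downloads above (this assumes the same motor-style async db handle and a views field on file documents; it is not part of this commit):

from bson import ObjectId

async def increment_views(db, file_id: str):
    # $inc is applied server-side as a single atomic operation, so concurrent
    # requests cannot lose updates the way a read-modify-replace_one cycle could.
    await db["files"].update_one(
        {"_id": ObjectId(file_id)}, {"$inc": {"views": 1}}
    )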