11import json
22import os
3+ import botocore
34import requests
45import boto3
56
67from conditional .models .models import MajorProject , MajorProjectSkill
78from conditional .util .user_dict import user_dict_is_eval_director
8- from config import AWS_ACCESS_KEY_ID , AWS_SECRET_ACCESS_KEY
99from flask import Blueprint
1010from flask import request
1111from flask import jsonify
2424
2525from conditional import db , start_of_year , get_user , auth , app
2626
import collections
import collections.abc

# Python 3.10 removed the long-deprecated ``collections.Callable`` alias,
# but a dependency of this app still looks it up there — restore the alias
# by pointing it at ``collections.abc.Callable``.
collections.Callable = collections.abc.Callable
# Module-level structlog logger shared by the route handlers below.
logger = structlog.get_logger()

# Blueprint collecting all /major_project routes; registered by the app factory.
major_project_bp = Blueprint("major_project_bp", __name__)
3033
def list_files_in_folder(bucket_name, folder_prefix):
    """Return all object keys under *folder_prefix* in *bucket_name*.

    Connects to the S3-compatible object store configured on the Flask app
    (``AWS_ACCESS_KEY_ID`` / ``AWS_SECRET_ACCESS_KEY`` / ``S3_URI``) and
    lists every key with the given prefix.

    :param bucket_name: name of the bucket to list
    :param folder_prefix: key prefix (e.g. ``f"{project_id}/"``) to filter on
    :return: list of matching object keys; empty list when there are no
        objects or the listing request fails
    """
    s3 = boto3.client(
        service_name="s3",
        aws_access_key_id=app.config['AWS_ACCESS_KEY_ID'],
        aws_secret_access_key=app.config['AWS_SECRET_ACCESS_KEY'],
        endpoint_url=app.config['S3_URI'],
    )

    try:
        # Use list_objects_v2 via a paginator: the legacy list_objects call
        # is deprecated and a single response is capped at 1000 keys, which
        # would silently truncate larger folders.
        paginator = s3.get_paginator("list_objects_v2")
        keys = []
        for page in paginator.paginate(Bucket=bucket_name, Prefix=folder_prefix):
            # "Contents" is absent entirely when a page has no matches.
            keys.extend(obj["Key"] for obj in page.get("Contents", []))
        return keys
    except botocore.exceptions.ClientError as e:
        # Route the failure through the app's structlog logger (was print(),
        # which never reaches the application logs) and degrade gracefully.
        logger.error(f"Error listing files in the folder: {e}")
        return []
3153
3254@major_project_bp .route ("/major_project/" )
3355@auth .oidc_auth ("default" )
@@ -53,7 +75,14 @@ def display_major_project(user_dict=None):
5375 MajorProject .id == MajorProjectSkill .project_id
5476 ).group_by (MajorProject .id
5577 ).where (MajorProject .date >= start_of_year ()
56- ).order_by (MajorProject .date )
78+ ).order_by (desc (MajorProject .date ), desc (MajorProject .id ))
79+
80+ s3 = boto3 .client (
81+ service_name = "s3" ,
82+ aws_access_key_id = app .config ['AWS_ACCESS_KEY_ID' ],
83+ aws_secret_access_key = app .config ['AWS_SECRET_ACCESS_KEY' ],
84+ endpoint_url = app .config ['S3_URI' ]
85+ )
5786
5887 major_projects = [
5988 {
@@ -68,7 +97,8 @@ def display_major_project(user_dict=None):
6897 "desc" : p .description ,
6998 "links" : list (filter (None , p .links .split ("\n " ))),
7099 "status" : p .status ,
71- "is_owner" : bool (user_dict ["username" ] == p .uid )
100+ "is_owner" : bool (user_dict ["username" ] == p .uid ),
101+ "files" : list_files_in_folder ("major-project-media" , f"{ p .id } /" )
72102 }
73103 for p in proj_list
74104 ]
@@ -88,7 +118,7 @@ def upload_major_project_files(user_dict=None):
88118 log = logger .new (request = request , auth_dict = user_dict )
89119 log .info ('Uploading Major Project File(s)' )
90120
91- log .info (f"user_dict: { user_dict } " )
121+ # log.info(f"user_dict: {user_dict}")
92122
93123 if len (list (request .files .keys ())) < 1 :
94124 return "No file" , 400
@@ -168,27 +198,23 @@ def submit_major_project(user_dict=None):
168198 name = name .replace ("<!" , "<! " )
169199
170200 # Connect to S3 bucket
171- s3 = boto3 .resource (
172- "s3" ,
173- endpoint_url = "https://s3.csh.rit.edu" ,
174- aws_access_key_id = AWS_ACCESS_KEY_ID ,
175- aws_secret_access_key = AWS_SECRET_ACCESS_KEY
176- )
177-
178- bucket = s3 .create_bucket (Bucket = "major-project-media" )
179-
201+ s3 = boto3 .client ("s3" ,
202+ aws_access_key_id = app .config ['AWS_ACCESS_KEY_ID' ],
203+ aws_secret_access_key = app .config ['AWS_SECRET_ACCESS_KEY' ],
204+ endpoint_url = app .config ['S3_URI' ])
205+
180206 # Collect all the locally cached files and put them in the bucket
181- for file in os . listdir ( f"/tmp/{ user_id } " ):
182- filepath = f"/tmp/ { user_id } / { file } "
183-
184- # TODO: Remove this later
185- print ( f"Filepath in S3: { filepath } " )
186-
187- bucket . upload_file ( filepath , f" { project . id } -- { file } " )
188- os .remove (filepath )
207+ temp_dir = f"/tmp/{ user_id } "
208+ if os . path . exists ( temp_dir ):
209+ for file in os . listdir ( temp_dir ):
210+ filepath = f" { temp_dir } / { file } "
211+
212+ s3 . upload_file ( filepath , 'major-project-media' , f" { project . id } / { file } " )
213+
214+ os .remove (filepath )
189215
190- # Delete the temp directory once all the files have been stored in S3
191- os .rmdir (f"/tmp/ { user_id } " )
216+ # Delete the temp directory once all the files have been stored in S3
217+ os .rmdir (temp_dir )
192218
193219
194220 # Send the slack ping only after we know that the data was properly saved to the DB
0 commit comments