Commit a7b79c3a authored by Santosh

adding WIP

parent 0ef49544
@@ -2,6 +2,10 @@ from flask import Flask, render_template, request, redirect, url_for, flash, session
import os
import boto3
import json
from constants import *
from utils import *
from lfu_checker import LFUChecker
def get_files():
    directory = os.path.join(os.getcwd(), 'cache')
@@ -62,7 +66,24 @@ def upload():
        s3 = boto3.client('s3', aws_access_key_id=creds["access_key"], aws_secret_access_key=creds["secret"])
        s3.upload_fileobj(file, "hnas", filename)
    else:
        print("Invalid Location")
    with open("creds.json", "r") as f:
        creds = json.load(f)
    s3 = boto3.client('s3', aws_access_key_id=creds["access_key"], aws_secret_access_key=creds["secret"])
    s3.upload_fileobj(file, "hnas", filename)
    # If the new file would push the local cache over CACHE_SIZE, evict the
    # least-frequently-used files until it fits.
    tot_size = get_total_files_size()
    file_size = file.seek(0, os.SEEK_END)  # seek() returns the new position, i.e. the stream size
    if tot_size + file_size > CACHE_SIZE:
        lfu_checker = LFUChecker(CACHE_SIZE)
        lfu = lfu_checker.get_lfu()
        lfu = dict(sorted(lfu.items(), key=lambda item: item[1]))  # least used first
        for key in lfu:
            if key in get_files():
                os.remove(os.path.join(os.getcwd(), 'cache', key))
            if get_total_files_size() + file_size <= CACHE_SIZE:
                break
    file.seek(0)  # rewind: the S3 upload and size check consumed the stream
    file_path = os.path.join(os.getcwd(), 'cache', filename)
    file.save(file_path)
    return redirect(url_for('index'))
return render_template('upload.html')
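A note on the size check above: it relies on file.seek(0, os.SEEK_END) returning the new stream position, which works for Werkzeug's FileStorage in Python 3 but is easy to misread. A small sketch of a more explicit form, assuming the same file object from the handler:

# Sketch only: measure the upload size explicitly instead of using seek()'s return value.
file.seek(0, os.SEEK_END)   # jump to the end of the uploaded stream
file_size = file.tell()     # position at the end == size in bytes
file.seek(0)                # rewind so later reads/saves see the whole file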
constants.py 0 → 100644
CACHE_SIZE = 10**10
\ No newline at end of file
lfu_checker.py 0 → 100644
from constants import *
from utils import *
import json

class LFUChecker:
    def __init__(self, cache_size):
        self.cache = cache_size

    def get_files(self):
        local_files = set(get_files())
        s3_files = set(get_s3_files())
        return local_files, s3_files

    def get_access_data(self):
        with open("access_data.json", "r") as f:
            access_data = json.load(f)
        return access_data
\ No newline at end of file
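The upload handler above calls lfu_checker.get_lfu(), which this WIP class does not define yet. A minimal sketch of what it could look like, assuming access_data.json maps filenames to access counts (that format is an assumption, not shown in this commit):

    # Hypothetical sketch only, not part of this commit.
    # Assumes access_data.json maps filename -> access count.
    def get_lfu(self):
        local_files, _ = self.get_files()
        access_data = self.get_access_data()
        # Only files currently in the local cache are eviction candidates.
        return {name: access_data.get(name, 0) for name in local_files}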
utils.py 0 → 100644
import boto3
import json
import os

def get_total_files_size():
    directory = os.path.join(os.getcwd(), 'cache')
    total_size = 0
    for filename in os.listdir(directory):
        if os.path.isfile(os.path.join(directory, filename)):
            total_size += os.path.getsize(os.path.join(directory, filename))
    return total_size

def get_files():
    directory = os.path.join(os.getcwd(), 'cache')
    files = []
    for filename in os.listdir(directory):
        if os.path.isfile(os.path.join(directory, filename)):
            files.append(filename)
    print(files)
    return files
def get_s3_files():
    with open("creds.json", "r") as f:
        creds = json.load(f)
    s3 = boto3.client('s3', aws_access_key_id=creds["access_key"], aws_secret_access_key=creds["secret"])
    s3_files = []
    # 'Contents' is absent when the bucket is empty, so fall back to an empty list.
    for key in s3.list_objects(Bucket=creds["bucket"]).get('Contents', []):
        s3_files.append(key['Key'])
    return s3_files
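For reference, both the upload handler and get_s3_files() read AWS credentials and the bucket name from creds.json. Based on the keys the code accesses, the file is expected to look roughly like the following (values are placeholders, not from this commit):

{
    "access_key": "<aws-access-key-id>",
    "secret": "<aws-secret-access-key>",
    "bucket": "<bucket-name>"
}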