-
Notifications
You must be signed in to change notification settings - Fork 35
Expand file tree
/
Copy pathbuckets.py
More file actions
54 lines (40 loc) · 1.54 KB
/
buckets.py
File metadata and controls
54 lines (40 loc) · 1.54 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
import datetime
import logging

from odoo import fields, models

# Module-level logger; the original referenced `_logger` without ever
# defining it, so a missing boto3 raised NameError instead of logging.
_logger = logging.getLogger(__name__)

try:
    # `import boto3` is required because code below calls boto3.Session;
    # the original only imported submodules, leaving `boto3` undefined.
    import boto3
    from boto3 import session
    from boto3 import resource
    from boto3 import client
except ImportError:
    _logger.error('Cannot import Boto3', exc_info=True)
class AwsS3Analytics(models.Model):
    """Mirror AWS S3 bucket names as Odoo records for analytics."""

    _name = 'aws.s3.analytics'
    _description = 'AWS S3 Analytics'
    _rec_name = 'bucket_name'

    # Bucket name as reported by S3; populated by list_buckets().
    bucket_name = fields.Char(string="Bucket", readonly=True)
    object_line_ids = fields.One2many('object.lines', 'relation_id', string='Object Lines', readonly=True)

    def list_buckets(self):
        """Fetch every S3 bucket visible to the company AWS credentials
        and persist one aws.s3.analytics record per bucket.

        Bug fix: the original called ``boto3.Session(...)`` but only the
        ``session`` submodule was imported, so the call raised NameError.
        Use the imported ``session`` module instead.
        """
        aws_session = session.Session(
            aws_access_key_id=self.env.company.aws_access_key_id_o,
            aws_secret_access_key=self.env.company.aws_secret_access_key_o,
        )
        s3 = aws_session.resource('s3')
        buckets = list(s3.buckets.all())
        self.create_bucket(buckets)

    def create_bucket(self, lista):
        """Create one aws.s3.analytics record per boto3 bucket in *lista*.

        :param lista: iterable of boto3 Bucket objects (only ``.name`` is read)
        """
        # Batch create: one INSERT for all buckets instead of one per bucket.
        self.create([{"bucket_name": bucket.name} for bucket in lista])
        # NOTE(review): an explicit cursor commit inside business code is an
        # Odoo anti-pattern (it breaks the caller's transaction), but it is
        # preserved here to keep the original behavior — confirm before removing.
        self.env.cr.commit()
class ObjectLines(models.Model):
    """One line per S3 object, attached to an aws.s3.analytics bucket record."""
    _name = 'object.lines'
    _description = 'Object lines'
    # NOTE(review): both Many2one fields below point at aws.s3.analytics.
    # relation_id is the inverse of aws.s3.analytics.object_line_ids;
    # bucket_name_id looks redundant with it — confirm against callers
    # before consolidating. (Field labels are in Portuguese; they are
    # runtime strings and are left untouched.)
    bucket_name_id = fields.Many2one('aws.s3.analytics', string='Bucket')
    # Object key within the bucket.
    bucket_object = fields.Char(string="Objeto")
    # Last-modified timestamp of the S3 object.
    last_modified = fields.Datetime('Ultima modificação do Obj')
    # Object size; precision taken from the 'Disk usage' decimal.precision record.
    disk_usage = fields.Float(string="Uso de disco", digits='Disk usage')
    relation_id = fields.Many2one('aws.s3.analytics', string='IDS')