@@ -1,18 +1,11 @@
//! Base configuration options
//!
//! - `HEROKU`: Is this instance of crates_io currently running on Heroku.
- //! - `S3_BUCKET`: The S3 bucket used to store crate files. If not present during development,
- //!   crates_io will fall back to a local uploader.
- //! - `S3_REGION`: The region in which the bucket was created. Optional if US standard.
- //! - `AWS_ACCESS_KEY`: The access key to interact with S3.
- //! - `AWS_SECRET_KEY`: The secret key to interact with S3.
- //! - `S3_CDN`: Optional CDN configuration for building public facing URLs.

- use crate::{env, uploaders::Uploader, Env};
+ use crate::Env;

pub struct Base {
    pub env: Env,
-     pub uploader: Uploader,
}

impl Base {
@@ -22,82 +15,6 @@ impl Base {
            _ => Env::Development,
        };

-         let uploader = if env == Env::Production {
-             // `env` panics if these vars are not set, and in production for a primary instance,
-             // that's what we want since we don't want to be able to start the server if the
-             // server doesn't know where to upload crates.
-             Self::s3_panic_if_missing_keys()
-         } else if dotenvy::var("S3_BUCKET").is_ok() {
-             // If we've set the `S3_BUCKET` variable to any value, use all of the values
-             // for the related S3 environment variables and configure the app to upload to
-             // and read from S3 like production does. All values except for bucket are
-             // optional, like production read-only mirrors.
-             info!("Using S3 uploader");
-             Self::s3_maybe_read_only()
-         } else {
-             // If we don't set the `S3_BUCKET` variable, we'll use a development-only
-             // uploader that makes it possible to run and publish to a locally-running
-             // crates.io instance without needing to set up an account and a bucket in S3.
-             info!("Using local uploader, crate files will be in the local_uploads directory");
-             Uploader::Local
-         };
-
-         Self { env, uploader }
-     }
-
-     pub fn uploader(&self) -> &Uploader {
-         &self.uploader
-     }
-
-     fn s3_panic_if_missing_keys() -> Uploader {
-         let index_bucket = match dotenvy::var("S3_INDEX_BUCKET") {
-             Ok(name) => Some(Box::new(s3::Bucket::new(
-                 name,
-                 dotenvy::var("S3_INDEX_REGION")
-                     .map_or_else(|_err| s3::Region::Default, s3::Region::Region),
-                 env("AWS_ACCESS_KEY"),
-                 env("AWS_SECRET_KEY"),
-                 "https",
-             ))),
-             Err(_) => None,
-         };
-         Uploader::S3 {
-             bucket: Box::new(s3::Bucket::new(
-                 env("S3_BUCKET"),
-                 dotenvy::var("S3_REGION")
-                     .map_or_else(|_err| s3::Region::Default, s3::Region::Region),
-                 env("AWS_ACCESS_KEY"),
-                 env("AWS_SECRET_KEY"),
-                 "https",
-             )),
-             index_bucket,
-             cdn: dotenvy::var("S3_CDN").ok(),
-         }
-     }
-
-     fn s3_maybe_read_only() -> Uploader {
-         let index_bucket = match dotenvy::var("S3_INDEX_BUCKET") {
-             Ok(name) => Some(Box::new(s3::Bucket::new(
-                 name,
-                 dotenvy::var("S3_INDEX_REGION")
-                     .map_or_else(|_err| s3::Region::Default, s3::Region::Region),
-                 dotenvy::var("AWS_ACCESS_KEY").unwrap_or_default(),
-                 dotenvy::var("AWS_SECRET_KEY").unwrap_or_default(),
-                 "https",
-             ))),
-             Err(_) => None,
-         };
-         Uploader::S3 {
-             bucket: Box::new(s3::Bucket::new(
-                 env("S3_BUCKET"),
-                 dotenvy::var("S3_REGION")
-                     .map_or_else(|_err| s3::Region::Default, s3::Region::Region),
-                 dotenvy::var("AWS_ACCESS_KEY").unwrap_or_default(),
-                 dotenvy::var("AWS_SECRET_KEY").unwrap_or_default(),
-                 "https",
-             )),
-             index_bucket,
-             cdn: dotenvy::var("S3_CDN").ok(),
-         }
+         Self { env }
    }
}
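
For orientation, a minimal sketch of how the trimmed-down module could look after this diff is applied. The constructor name `from_environment` and the exact handling of the `HEROKU` variable are assumptions inferred from the doc comment and the surviving context lines (`_ => Env::Development,`, `Self { env }`); they are not copied from the PR itself.

use crate::Env;

pub struct Base {
    pub env: Env,
}

impl Base {
    // Hypothetical constructor; the real method name in the codebase may differ.
    pub fn from_environment() -> Self {
        // Per the doc comment, `HEROKU` marks an instance running on Heroku
        // (production); anything else falls back to the development environment.
        let env = match dotenvy::var("HEROKU") {
            Ok(_) => Env::Production,
            _ => Env::Development,
        };

        Self { env }
    }
}

With the `uploader` field and its getter removed, any caller that previously reached storage through `Base::uploader()` would need to obtain its storage handle elsewhere.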