
Mostly switch to chrono #781

Merged 2 commits on Jun 4, 2020

3 changes: 2 additions & 1 deletion Cargo.lock

Some generated files are not rendered by default.

7 changes: 5 additions & 2 deletions Cargo.toml
@@ -13,7 +13,6 @@ log = "0.4"
regex = "1"
structopt = "0.3"
crates-index-diff = "7"
time = "0.1"
reqwest = "0.9"
semver = "0.9"
slug = "=0.1.1"
@@ -59,6 +58,10 @@ tera = { version = "1.3.0", features = ["builtins"] }
arc-swap = "0.4.6"
notify = "4.0.15"

+ # Date and Time utilities
+ chrono = { version = "0.4.11", features = ["serde"] }
+ time = "0.1" # TODO: Remove once `iron` is removed

[target.'cfg(not(windows))'.dependencies]
libc = "0.2"

@@ -70,7 +73,7 @@ path-slash = "0.1.1"

[dependencies.postgres]
version = "0.15"
features = ["with-time", "with-serde_json"]
features = ["with-chrono", "with-serde_json"]

[dev-dependencies]
once_cell = "1.2.0"
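Note (not part of the diff): enabling chrono's `serde` feature is what lets `DateTime<Utc>` fields pass through serde_json elsewhere in the codebase. A minimal sketch, assuming `serde` (with derive) and `serde_json` are available; `ReleaseInfo` is a made-up type for illustration:

```rust
use chrono::{DateTime, Utc};
use serde::Serialize;

#[derive(Serialize)]
struct ReleaseInfo {
    // With chrono's `serde` feature, DateTime<Utc> serializes as an
    // RFC 3339-style timestamp string; without it, this derive would not compile.
    release_time: DateTime<Utc>,
}

fn main() -> Result<(), serde_json::Error> {
    let info = ReleaseInfo { release_time: Utc::now() };
    println!("{}", serde_json::to_string(&info)?);
    Ok(())
}
```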
2 changes: 1 addition & 1 deletion src/db/add_package.rs
@@ -85,7 +85,7 @@ pub(crate) fn add_package_into_database(
&[
&crate_id,
&metadata_pkg.version,
&cratesio_data.release_time,
&cratesio_data.release_time.naive_utc(),
&serde_json::to_value(&dependencies)?,
&metadata_pkg.package_name(),
&cratesio_data.yanked,
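A note on the `.naive_utc()` call (my reading, not part of the diff): rust-postgres's `with-chrono` feature maps a `TIMESTAMP` (without time zone) column to `NaiveDateTime`, so the `DateTime<Utc>` coming from the registry has to drop its zone before being bound as a parameter. Roughly, assuming the `release_time` column is a plain `TIMESTAMP`:

```rust
use chrono::{DateTime, NaiveDateTime, Utc};

// Hypothetical helper: what `.naive_utc()` does to the value before it is
// handed to the `with-chrono`-enabled postgres driver.
fn to_db_timestamp(release_time: DateTime<Utc>) -> NaiveDateTime {
    // Drops the offset marker but keeps the UTC wall-clock instant.
    release_time.naive_utc()
}

fn main() {
    let now = Utc::now();
    // The underlying instant is unchanged.
    assert_eq!(to_db_timestamp(now).timestamp(), now.timestamp());
}
```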
4 changes: 2 additions & 2 deletions src/db/file.rs
@@ -13,8 +13,8 @@ use std::path::{Path, PathBuf};

pub(crate) use crate::storage::Blob;

pub(crate) fn get_path(conn: &Connection, path: &str) -> Option<Blob> {
Storage::new(conn).get(path).ok()
pub(crate) fn get_path(conn: &Connection, path: &str) -> Result<Blob> {
Storage::new(conn).get(path)
}

/// Store all files in a directory and return [[mimetype, filename]] as Json
32 changes: 15 additions & 17 deletions src/index/api.rs
@@ -1,14 +1,13 @@
+ use std::io::Read;

use crate::{error::Result, utils::MetadataPackage};

+ use chrono::{DateTime, Utc};
use failure::err_msg;
use reqwest::{header::ACCEPT, Client};
use semver::Version;
use serde_json::Value;
- use time::Timespec;
- use std::io::Read;

pub(crate) struct RegistryCrateData {
- pub(crate) release_time: Timespec,
+ pub(crate) release_time: DateTime<Utc>,
pub(crate) yanked: bool,
pub(crate) downloads: i32,
pub(crate) owners: Vec<CrateOwner>,
@@ -36,18 +35,17 @@ impl RegistryCrateData {
}

/// Get release_time, yanked and downloads from the registry's API
fn get_release_time_yanked_downloads(pkg: &MetadataPackage) -> Result<(time::Timespec, bool, i32)> {
fn get_release_time_yanked_downloads(pkg: &MetadataPackage) -> Result<(DateTime<Utc>, bool, i32)> {
let url = format!("https://crates.io/api/v1/crates/{}/versions", pkg.name);
// FIXME: There is probably better way to do this
// and so many unwraps...
let client = Client::new();
- let mut res = client
- .get(&url[..])
- .header(ACCEPT, "application/json")
- .send()?;
+ let mut res = client.get(&url).header(ACCEPT, "application/json").send()?;

let mut body = String::new();
res.read_to_string(&mut body).unwrap();
let json: Value = serde_json::from_str(&body[..])?;
res.read_to_string(&mut body)?;

let json: Value = serde_json::from_str(&body)?;
let versions = json
.as_object()
.and_then(|o| o.get("versions"))
@@ -65,15 +63,15 @@ fn get_release_time_yanked_downloads(pkg: &MetadataPackage) -> Result<(time::Tim
.and_then(|v| v.as_str())
.ok_or_else(|| err_msg("Not a JSON object"))?;

- if semver::Version::parse(version_num).unwrap().to_string() == pkg.version {
+ if Version::parse(version_num)?.to_string() == pkg.version {
let release_time_raw = version
.get("created_at")
.and_then(|c| c.as_str())
.ok_or_else(|| err_msg("Not a JSON object"))?;

release_time = Some(
- time::strptime(release_time_raw, "%Y-%m-%dT%H:%M:%S")
- .unwrap()
- .to_timespec(),
+ DateTime::parse_from_str(release_time_raw, "%Y-%m-%dT%H:%M:%S%.f%:z")?
+ .with_timezone(&Utc),
);

yanked = Some(
@@ -95,7 +93,7 @@ fn get_release_time_yanked_downloads(pkg: &MetadataPackage) -> Result<(time::Tim
}

Ok((
- release_time.unwrap_or_else(time::get_time),
+ release_time.unwrap_or_else(Utc::now),
yanked.unwrap_or(false),
downloads.unwrap_or(0),
))
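For reference, a standalone sketch of the new parsing path (not part of the diff). The format string comes from the change itself; the sample `created_at` value below is made up, assuming crates.io returns an RFC 3339 timestamp with fractional seconds and an explicit offset:

```rust
use chrono::{DateTime, Utc};

fn main() -> Result<(), chrono::ParseError> {
    // Hypothetical `created_at` value in the shape the code expects.
    let raw = "2018-04-16T04:33:50.149933+00:00";

    // Parse with the fixed offset, then normalize to UTC, as the new code does.
    let release_time: DateTime<Utc> =
        DateTime::parse_from_str(raw, "%Y-%m-%dT%H:%M:%S%.f%:z")?.with_timezone(&Utc);

    // 2018-04-16 04:33:50 UTC is 1523853230 seconds after the Unix epoch.
    assert_eq!(release_time.timestamp(), 1_523_853_230);
    Ok(())
}
```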
11 changes: 7 additions & 4 deletions src/storage/database.rs
@@ -1,4 +1,5 @@
use super::Blob;
+ use chrono::{DateTime, NaiveDateTime, Utc};
use failure::{Error, Fail};
use postgres::{transaction::Transaction, Connection};

@@ -28,11 +29,12 @@ impl<'a> DatabaseBackend<'a> {
Ok(Blob {
path: row.get("path"),
mime: row.get("mime"),
- date_updated: row.get("date_updated"),
+ date_updated: DateTime::from_utc(row.get::<_, NaiveDateTime>("date_updated"), Utc),
content: row.get("content"),
})
}
}

pub(super) fn store_batch(&self, batch: &[Blob], trans: &Transaction) -> Result<(), Error> {
for blob in batch {
trans.query(
@@ -50,21 +52,22 @@ impl<'a> DatabaseBackend<'a> {
#[cfg(test)]
mod tests {
use super::*;
- use time::Timespec;
+ use chrono::{SubsecRound, Utc};

#[test]
fn test_path_get() {
crate::test::wrapper(|env| {
let conn = env.db().conn();
let backend = DatabaseBackend::new(&conn);
+ let now = Utc::now();

// Add a test file to the database
conn.execute(
"INSERT INTO files (path, mime, date_updated, content) VALUES ($1, $2, $3, $4);",
&[
&"dir/foo.txt",
&"text/plain",
&Timespec::new(42, 0),
&now.naive_utc(),
&"Hello world!".as_bytes(),
],
)?;
@@ -74,7 +77,7 @@ mod tests {
Blob {
path: "dir/foo.txt".into(),
mime: "text/plain".into(),
- date_updated: Timespec::new(42, 0),
+ date_updated: now.trunc_subsecs(6),
content: "Hello world!".bytes().collect(),
},
backend.get("dir/foo.txt")?
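A sketch of the round trip this test exercises (mine, not part of the diff): the column stores a zone-less timestamp, so the value is written with `naive_utc()` and re-attached to `Utc` when read back; `trunc_subsecs(6)` accounts for Postgres keeping only microseconds while chrono keeps nanoseconds. Assuming a plain `TIMESTAMP` column:

```rust
use chrono::{DateTime, NaiveDateTime, SubsecRound, Utc};

fn main() {
    let now: DateTime<Utc> = Utc::now();

    // What gets written to the database (no time zone attached)...
    let stored: NaiveDateTime = now.naive_utc();

    // ...and how the backend rebuilds a DateTime<Utc> when reading it back.
    let loaded: DateTime<Utc> = DateTime::from_utc(stored, Utc);

    // In the real test the value passes through Postgres, which keeps only
    // microseconds, hence the comparison against `now.trunc_subsecs(6)`.
    assert_eq!(loaded.trunc_subsecs(6), now.trunc_subsecs(6));
}
```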
14 changes: 7 additions & 7 deletions src/storage/mod.rs
@@ -3,10 +3,8 @@ pub(crate) mod s3;

pub(crate) use self::database::DatabaseBackend;
pub(crate) use self::s3::S3Backend;
- use failure::Error;
- use time::Timespec;
-
- use failure::err_msg;
+ use chrono::{DateTime, Utc};
+ use failure::{err_msg, Error};
use postgres::{transaction::Transaction, Connection};
use std::collections::HashMap;
use std::ffi::OsStr;
@@ -20,7 +18,7 @@ const MAX_CONCURRENT_UPLOADS: usize = 1000;
pub(crate) struct Blob {
pub(crate) path: String,
pub(crate) mime: String,
- pub(crate) date_updated: Timespec,
+ pub(crate) date_updated: DateTime<Utc>,
pub(crate) content: Vec<u8>,
}

@@ -130,7 +128,7 @@ impl<'a> Storage<'a> {
mime: mime.to_string(),
content,
// this field is ignored by the backend
- date_updated: Timespec::new(0, 0),
+ date_updated: Utc::now(),
})
});
loop {
@@ -281,9 +279,10 @@ mod test {
mime: "text/rust".into(),
content: "fn main() {}".into(),
path: format!("{}.rs", i),
- date_updated: Timespec::new(42, 0),
+ date_updated: Utc::now(),
})
.collect();

test_roundtrip(&uploads);
}

@@ -297,6 +296,7 @@ mod test {
let files = get_file_list(env::current_dir().unwrap().join("Cargo.toml")).unwrap();
assert_eq!(files[0], std::path::Path::new("Cargo.toml"));
}

#[test]
fn test_mime_types() {
check_mime(".gitignore", "text/plain");
25 changes: 17 additions & 8 deletions src/storage/s3.rs
@@ -1,4 +1,5 @@
use super::Blob;
+ use chrono::{DateTime, NaiveDateTime, Utc};
use failure::Error;
use futures::Future;
use log::{error, warn};
@@ -7,7 +8,6 @@ use rusoto_credential::DefaultCredentialsProvider;
use rusoto_s3::{GetObjectRequest, PutObjectRequest, S3Client, S3};
use std::convert::TryInto;
use std::io::Read;
- use time::Timespec;
use tokio::runtime::Runtime;

#[cfg(test)]
@@ -100,9 +100,13 @@ impl<'a> S3Backend<'a> {
}
}

fn parse_timespec(raw: &str) -> Result<Timespec, Error> {
const TIME_FMT: &str = "%a, %d %b %Y %H:%M:%S %Z";
Ok(time::strptime(raw, TIME_FMT)?.to_timespec())
fn parse_timespec(mut raw: &str) -> Result<DateTime<Utc>, Error> {
raw = raw.trim_end_matches(" GMT");

Ok(DateTime::from_utc(
NaiveDateTime::parse_from_str(raw, "%a, %d %b %Y %H:%M:%S")?,
Utc,
))
}

pub(crate) fn s3_client() -> Option<S3Client> {
@@ -111,13 +115,15 @@ pub(crate) fn s3_client() -> Option<S3Client> {
if std::env::var_os("AWS_ACCESS_KEY_ID").is_none() && std::env::var_os("FORCE_S3").is_none() {
return None;
}

let creds = match DefaultCredentialsProvider::new() {
Ok(creds) => creds,
Err(err) => {
warn!("failed to retrieve AWS credentials: {}", err);
return None;
}
};

Some(S3Client::new_with(
rusoto_core::request::HttpClient::new().unwrap(),
creds,
@@ -135,18 +141,19 @@ pub(crate) fn s3_client() -> Option<S3Client> {
pub(crate) mod tests {
use super::*;
use crate::test::*;
+ use chrono::TimeZone;
use std::slice;

#[test]
fn test_parse_timespec() {
// Test valid conversions
assert_eq!(
parse_timespec("Thu, 1 Jan 1970 00:00:00 GMT").unwrap(),
- Timespec::new(0, 0)
+ Utc.ymd(1970, 1, 1).and_hms(0, 0, 0),
);
assert_eq!(
parse_timespec("Mon, 16 Apr 2018 04:33:50 GMT").unwrap(),
- Timespec::new(1523853230, 0)
+ Utc.ymd(2018, 4, 16).and_hms(4, 33, 50),
);

// Test invalid conversion
@@ -159,7 +166,7 @@ pub(crate) mod tests {
let blob = Blob {
path: "dir/foo.txt".into(),
mime: "text/plain".into(),
- date_updated: Timespec::new(42, 0),
+ date_updated: Utc::now(),
content: "Hello world!".into(),
};

@@ -189,15 +196,17 @@ pub(crate) mod tests {
"parent/child",
"h/i/g/h/l/y/_/n/e/s/t/e/d/_/d/i/r/e/c/t/o/r/i/e/s",
];

let blobs: Vec<_> = names
.iter()
.map(|&path| Blob {
path: path.into(),
mime: "text/plain".into(),
- date_updated: Timespec::new(42, 0),
+ date_updated: Utc::now(),
content: "Hello world!".into(),
})
.collect();

s3.upload(&blobs).unwrap();
for blob in &blobs {
s3.assert_blob(blob, &blob.path);
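One way to convince yourself the rewritten test expectations are equivalent to the old `Timespec` values (a quick standalone check, not part of the diff):

```rust
use chrono::{TimeZone, Utc};

fn main() {
    // Timespec::new(0, 0) and Timespec::new(1523853230, 0) were seconds since
    // the Unix epoch; the chrono expressions denote the same instants.
    assert_eq!(Utc.ymd(1970, 1, 1).and_hms(0, 0, 0).timestamp(), 0);
    assert_eq!(
        Utc.ymd(2018, 4, 16).and_hms(4, 33, 50).timestamp(),
        1_523_853_230
    );
}
```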
5 changes: 3 additions & 2 deletions src/test/fakes.rs
@@ -2,6 +2,7 @@ use super::TestDatabase;
use crate::docbuilder::BuildResult;
use crate::index::api::RegistryCrateData;
use crate::utils::{Dependency, MetadataPackage, Target};
+ use chrono::{DateTime, Utc};
use failure::Error;

#[must_use = "FakeRelease does nothing until you call .create()"]
@@ -54,7 +55,7 @@ impl<'a> FakeRelease<'a> {
doc_targets: Vec::new(),
default_target: None,
registry_crate_data: RegistryCrateData {
- release_time: time::get_time(),
+ release_time: Utc::now(),
yanked: false,
downloads: 0,
owners: Vec::new(),
@@ -74,7 +75,7 @@ impl<'a> FakeRelease<'a> {
self
}

- pub(crate) fn release_time(mut self, new: time::Timespec) -> Self {
+ pub(crate) fn release_time(mut self, new: DateTime<Utc>) -> Self {
self.registry_crate_data.release_time = new;
self
}
6 changes: 4 additions & 2 deletions src/utils/daemon.rs
@@ -7,6 +7,7 @@ use crate::{
utils::{github_updater, pubsubhubbub, update_release_activity},
DocBuilder, DocBuilderOptions,
};
+ use chrono::{Timelike, Utc};
use log::{debug, error, info, warn};
use std::panic::{catch_unwind, AssertUnwindSafe};
use std::path::PathBuf;
@@ -221,8 +222,9 @@ pub fn start_daemon(background: bool) {
.name("release activity updater".to_string())
.spawn(move || loop {
thread::sleep(Duration::from_secs(60));
- let now = time::now();
- if now.tm_hour == 23 && now.tm_min == 55 {
+ let now = Utc::now();
+
+ if now.hour() == 23 && now.minute() == 55 {
info!("Updating release activity");
if let Err(e) = update_release_activity() {
error!("Failed to update release activity: {}", e);
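Minor aside on the daemon change: `hour()` and `minute()` come from chrono's `Timelike` trait, which is why the new import brings it in alongside `Utc`. A small sketch of the same check:

```rust
use chrono::{Timelike, Utc};

fn main() {
    let now = Utc::now();

    // Same condition as the daemon loop: fire once a day at 23:55 UTC.
    let should_update = now.hour() == 23 && now.minute() == 55;
    println!("{} -> update release activity: {}", now.format("%H:%M"), should_update);
}
```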