6 changes: 5 additions & 1 deletion CHANGELOG.md
@@ -1,10 +1,14 @@
## Release notes

### 0.13.6 -- Jun 13, 2022
* Add - unified package level logger for package (#667) PR #1031
* Update - swap various datajoint messages, warnings, etc. to use the new logger. (#667) PR #1031

### 0.13.5 -- May 19, 2022
* Update - Import ABC from collections.abc for Python 3.10 compatibility
* Bugfix - Fix multiprocessing value error (#1013) PR #1026

### 0.13.4 -- March, 28 2022
### 0.13.4 -- Mar, 28 2022
* Add - Allow reading blobs produced by legacy 32-bit compiled mYm library for matlab. PR #995
* Bugfix - Add missing `jobs` argument for multiprocessing PR #997
* Add - Test for multiprocessing PR #1008
2 changes: 1 addition & 1 deletion LNX-docker-compose.yml
@@ -32,7 +32,7 @@ services:
interval: 1s
fakeservices.datajoint.io:
<<: *net
image: datajoint/nginx:v0.1.1
image: datajoint/nginx:v0.2.1
environment:
- ADD_db_TYPE=DATABASE
- ADD_db_ENDPOINT=db:3306
1 change: 1 addition & 0 deletions datajoint/__init__.py
@@ -52,6 +52,7 @@
"key_hash",
]

from .logging import logger
from .version import __version__
from .settings import config
from .connection import conn, Connection
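With `from .logging import logger` re-exported in `datajoint/__init__.py`, the package logger is reachable from the top-level namespace. A minimal sketch of how a downstream script might use that handle (the `dj` alias and the chosen level are illustrative assumptions, not part of this diff):

```python
import logging
import datajoint as dj

# dj.logger is the single package-level logger added by this PR;
# raising its level here surfaces the messages that were demoted to DEBUG.
dj.logger.setLevel(logging.DEBUG)
```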
14 changes: 10 additions & 4 deletions datajoint/autopopulate.py
@@ -13,7 +13,7 @@

# noinspection PyExceptionInherit,PyCallingNonCallable

logger = logging.getLogger(__name__)
logger = logging.getLogger(__name__.split(".")[0])


# --- helper functions for multiprocessing --
@@ -159,7 +159,7 @@ def populate(
max_calls=None,
display_progress=False,
processes=1,
make_kwargs=None
make_kwargs=None,
):
"""
``table.populate()`` calls ``table.make(key)`` for every primary key in
@@ -207,7 +207,7 @@ def handler(signum, frame):
elif order == "random":
random.shuffle(keys)

logger.info("Found %d keys to populate" % len(keys))
logger.debug("Found %d keys to populate" % len(keys))

keys = keys[:max_calls]
nkeys = len(keys)
@@ -275,7 +275,7 @@ def _populate1(
if jobs is not None:
jobs.complete(self.target.table_name, self._job_key(key))
else:
logger.info("Populating: " + str(key))
logger.debug(f"Making {key} -> {self.target.full_table_name}")
self.__class__._allow_insert = True
try:
make(dict(key), **(make_kwargs or {}))
@@ -288,6 +288,9 @@ def _populate1(
exception=error.__class__.__name__,
msg=": " + str(error) if str(error) else "",
)
logger.debug(
f"Error making {key} -> {self.target.full_table_name} - {error_message}"
)
if jobs is not None:
# show error name and error message (if any)
jobs.error(
@@ -303,6 +306,9 @@ def _populate1(
return key, error if return_exception_objects else error_message
else:
self.connection.commit_transaction()
logger.debug(
f"Success making {key} -> {self.target.full_table_name}"
)
if jobs is not None:
jobs.complete(self.target.table_name, self._job_key(key))
finally:
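The recurring change from `logging.getLogger(__name__)` to `logging.getLogger(__name__.split(".")[0])` is what unifies the loggers: every submodule now resolves to the package root name instead of its own module name. A small illustration, assuming the usual module naming (`datajoint.autopopulate`, etc.):

```python
import logging

# Inside datajoint/autopopulate.py, __name__ is "datajoint.autopopulate";
# splitting on "." and keeping the first element gives the package name.
root_name = "datajoint.autopopulate".split(".")[0]   # "datajoint"

# All submodules that apply the same pattern share one logger object,
# so a single handler/level configuration covers the whole package.
assert logging.getLogger(root_name) is logging.getLogger("datajoint")
```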
12 changes: 6 additions & 6 deletions datajoint/connection.py
@@ -17,7 +17,7 @@
from .hash import uuid_from_buffer
from .plugin import connection_plugins

logger = logging.getLogger(__name__)
logger = logging.getLogger(__name__.split(".")[0])
query_log_max_length = 300


@@ -187,7 +187,7 @@ def __init__(self, host, user, password, port=None, init_fun=None, use_tls=None)
self.conn_info["ssl_input"] = use_tls
self.conn_info["host_input"] = host_input
self.init_fun = init_fun
print("Connecting {user}@{host}:{port}".format(**self.conn_info))
logger.info("Connecting {user}@{host}:{port}".format(**self.conn_info))
self._conn = None
self._query_cache = None
connect_host_hook(self)
@@ -341,7 +341,7 @@ def query(
except errors.LostConnectionError:
if not reconnect:
raise
warnings.warn("MySQL server has gone away. Reconnecting to the server.")
logger.warning("MySQL server has gone away. Reconnecting to the server.")
connect_host_hook(self)
if self._in_transaction:
self.cancel_transaction()
Expand Down Expand Up @@ -382,15 +382,15 @@ def start_transaction(self):
raise errors.DataJointError("Nested connections are not supported.")
self.query("START TRANSACTION WITH CONSISTENT SNAPSHOT")
self._in_transaction = True
logger.info("Transaction started")
logger.debug("Transaction started")

def cancel_transaction(self):
"""
Cancels the current transaction and rolls back all changes made during the transaction.
"""
self.query("ROLLBACK")
self._in_transaction = False
logger.info("Transaction cancelled. Rolling back ...")
logger.debug("Transaction cancelled. Rolling back ...")

def commit_transaction(self):
"""
@@ -399,7 +399,7 @@
"""
self.query("COMMIT")
self._in_transaction = False
logger.info("Transaction committed and closed.")
logger.debug("Transaction committed and closed.")

# -------- context manager for transactions
@property
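Connection and transaction messages now flow through the shared logger at INFO/DEBUG level rather than `print` and `warnings.warn`, so whether they appear is purely a logging-configuration choice. One possible way to capture them, sketched here with an arbitrary file name and level:

```python
import logging

dj_logger = logging.getLogger("datajoint")
dj_logger.setLevel(logging.DEBUG)  # include "Transaction started", "Transaction committed and closed.", etc.

# Send the package's messages to a file alongside the default stream handler,
# reusing the same format string that datajoint/logging.py installs.
file_handler = logging.FileHandler("datajoint.log")
file_handler.setFormatter(logging.Formatter("[%(asctime)s][%(levelname)s]: %(message)s"))
dj_logger.addHandler(file_handler)
```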
4 changes: 2 additions & 2 deletions datajoint/declare.py
@@ -75,7 +75,7 @@ def match_type(attribute_type):
)


logger = logging.getLogger(__name__)
logger = logging.getLogger(__name__.split(".")[0])


def build_foreign_key_parser_old():
@@ -207,7 +207,7 @@ def compile_foreign_key(
)

if obsolete:
warnings.warn(
logger.warning(
'Line "{line}" uses obsolete syntax that will no longer be supported in datajoint 0.14. '
"For details, see issue #780 https://github.com/datajoint/datajoint-python/issues/780".format(
line=line
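Since the obsolete-foreign-key notice is now emitted through `logger.warning` instead of `warnings.warn`, it is controlled with logging filters rather than the `warnings` module. An illustrative one-liner (not part of the PR) that silences it along with other warning-level messages:

```python
import logging

# Only ERROR and above from the shared "datajoint" logger will be emitted.
logging.getLogger("datajoint").setLevel(logging.ERROR)
```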
13 changes: 7 additions & 6 deletions datajoint/diagram.py
@@ -2,10 +2,14 @@
import re
import functools
import io
import warnings
import logging
import inspect
from .table import Table
from .dependencies import unite_master_parts
from .user_tables import Manual, Imported, Computed, Lookup, Part
from .errors import DataJointError
from .table import lookup_class_name


try:
from matplotlib import pyplot as plt
@@ -21,11 +25,8 @@
except:
diagram_active = False

from .user_tables import Manual, Imported, Computed, Lookup, Part
from .errors import DataJointError
from .table import lookup_class_name


logger = logging.getLogger(__name__.split(".")[0])
user_table_classes = (Manual, Lookup, Computed, Imported, Part)


@@ -63,7 +64,7 @@ class Diagram:
"""

def __init__(self, *args, **kwargs):
warnings.warn(
logger.warning(
"Please install matplotlib and pygraphviz libraries to enable the Diagram feature."
)

2 changes: 1 addition & 1 deletion datajoint/expression.py
@@ -17,7 +17,7 @@
)
from .declare import CONSTANT_LITERALS

logger = logging.getLogger(__name__)
logger = logging.getLogger(__name__.split(".")[0])


class QueryExpression:
6 changes: 4 additions & 2 deletions datajoint/fetch.py
@@ -1,6 +1,6 @@
from functools import partial
from pathlib import Path
import warnings
import logging
import pandas
import itertools
import re
@@ -12,6 +12,8 @@
from .settings import config
from .utils import safe_write

logger = logging.getLogger(__name__.split(".")[0])


class key:
"""
@@ -209,7 +211,7 @@ def __call__(
)

if limit is None and offset is not None:
warnings.warn(
logger.warning(
"Offset set, but no limit. Setting limit to a large number. "
"Consider setting a limit explicitly."
)
2 changes: 1 addition & 1 deletion datajoint/heading.py
@@ -14,7 +14,7 @@
from .attribute_adapter import get_adapter, AttributeAdapter


logger = logging.getLogger(__name__)
logger = logging.getLogger(__name__.split(".")[0])

default_attribute_properties = (
dict( # these default values are set in computed attributes
32 changes: 32 additions & 0 deletions datajoint/logging.py
@@ -0,0 +1,32 @@
import logging
import os
import sys
import io

logger = logging.getLogger(__name__.split(".")[0])

log_level = os.getenv("DJ_LOG_LEVEL", "info").upper()

log_format = logging.Formatter("[%(asctime)s][%(levelname)s]: %(message)s")

stream_handler = logging.StreamHandler() # default handler
stream_handler.setFormatter(log_format)

logger.setLevel(level=log_level)
logger.handlers = [stream_handler]


def excepthook(exc_type, exc_value, exc_traceback):
if issubclass(exc_type, KeyboardInterrupt):
sys.__excepthook__(exc_type, exc_value, exc_traceback)
return

if logger.getEffectiveLevel() == 10:
logger.debug(
"Uncaught exception", exc_info=(exc_type, exc_value, exc_traceback)
)
else:
logger.error(f"Uncaught exception: {exc_value}")


sys.excepthook = excepthook
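The new module reads its level from the `DJ_LOG_LEVEL` environment variable at import time (defaulting to INFO), installs a single stream handler, and replaces `sys.excepthook` so uncaught exceptions are logged: the full traceback when the effective level is DEBUG (numeric value 10), otherwise just the exception value. A usage sketch under those assumptions; the script name and error are made up:

```python
# Run as:  DJ_LOG_LEVEL=debug python my_pipeline.py
import datajoint as dj  # importing datajoint installs the excepthook shown above

# Because the hook defers to sys.__excepthook__ for KeyboardInterrupt,
# Ctrl-C behaves as usual; any other uncaught error is routed to the logger
# (full traceback at DEBUG, a single "Uncaught exception: ..." line otherwise).
raise RuntimeError("demo failure")
```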
11 changes: 7 additions & 4 deletions datajoint/plugin.py
@@ -3,6 +3,9 @@
from pathlib import Path
from cryptography.exceptions import InvalidSignature
from otumat import hash_pkg, verify
import logging

logger = logging.getLogger(__name__.split(".")[0])


def _update_error_stack(plugin_name):
@@ -12,13 +15,13 @@ def _update_error_stack(plugin_name):
plugin_meta = pkg_resources.get_distribution(plugin_name)

data = hash_pkg(pkgpath=str(Path(plugin_meta.module_path, plugin_name)))
signature = plugin_meta.get_metadata("{}.sig".format(plugin_name))
pubkey_path = str(Path(base_meta.egg_info, "{}.pub".format(base_name)))
signature = plugin_meta.get_metadata(f"{plugin_name}.sig")
pubkey_path = str(Path(base_meta.egg_info, f"{base_name}.pub"))
verify(pubkey_path=pubkey_path, data=data, signature=signature)
print("DataJoint verified plugin `{}` detected.".format(plugin_name))
logger.info(f"DataJoint verified plugin `{plugin_name}` detected.")
return True
except (FileNotFoundError, InvalidSignature):
print("Unverified plugin `{}` detected.".format(plugin_name))
logger.warning(f"Unverified plugin `{plugin_name}` detected.")
return False


3 changes: 1 addition & 2 deletions datajoint/s3.py
@@ -4,13 +4,12 @@
from io import BytesIO
import minio # https://docs.minio.io/docs/python-client-api-reference
import urllib3
import warnings
import uuid
import logging
from pathlib import Path
from . import errors

logger = logging.getLogger(__name__)
logger = logging.getLogger(__name__.split(".")[0])


class Folder:
8 changes: 4 additions & 4 deletions datajoint/schemas.py
@@ -16,7 +16,7 @@
from .table import lookup_class_name, Log, FreeTable
import types

logger = logging.getLogger(__name__)
logger = logging.getLogger(__name__.split(".")[0])


def ordered_dir(class_):
@@ -134,7 +134,7 @@ def activate(
)
)
# create database
logger.info("Creating schema `{name}`.".format(name=schema_name))
logger.debug("Creating schema `{name}`.".format(name=schema_name))
try:
self.connection.query(
"CREATE DATABASE `{name}`".format(name=schema_name)
@@ -360,12 +360,12 @@ def drop(self, force=False):
)
== "yes"
):
logger.info("Dropping `{database}`.".format(database=self.database))
logger.debug("Dropping `{database}`.".format(database=self.database))
try:
self.connection.query(
"DROP DATABASE `{database}`".format(database=self.database)
)
logger.info(
logger.debug(
"Schema `{database}` was dropped successfully.".format(
database=self.database
)
8 changes: 4 additions & 4 deletions datajoint/settings.py
@@ -49,7 +49,7 @@
}
)

logger = logging.getLogger(__name__)
logger = logging.getLogger(__name__.split(".")[0])
log_levels = {
"INFO": logging.INFO,
"WARNING": logging.WARNING,
@@ -104,7 +104,7 @@ def save(self, filename, verbose=False):
with open(filename, "w") as fid:
json.dump(self._conf, fid, indent=4)
if verbose:
print("Saved settings in " + filename)
logger.info("Saved settings in " + filename)

def load(self, filename):
"""
@@ -240,8 +240,8 @@ def __getitem__(self, key):
return self._conf[key]

def __setitem__(self, key, value):
logger.log(
logging.INFO, "Setting {0:s} to {1:s}".format(str(key), str(value))
logger.debug(
logging.DEBUG, "Setting {0:s} to {1:s}".format(str(key), str(value))
)
if validators[key](value):
self._conf[key] = value