Coverage for flair/flair/__init__.py: 91%
import os
import torch
from pathlib import Path
from transformers import set_seed as hf_set_seed

# global variable: cache_root
cache_root = Path(os.getenv("FLAIR_CACHE_ROOT", Path(Path.home(), ".flair")))
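# Illustrative sketch (not part of this module): since cache_root reads the
# FLAIR_CACHE_ROOT environment variable at import time, the cache location can
# be redirected by setting it before flair is first imported, e.g.:
#
#   import os
#   os.environ["FLAIR_CACHE_ROOT"] = "/data/flair_cache"  # hypothetical path
#   import flair
#   assert str(flair.cache_root) == "/data/flair_cache"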
# global variable: device
device = None
if torch.cuda.is_available():
    device = torch.device("cuda:0")
else:
    device = torch.device("cpu")
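# Illustrative sketch (not part of this module): downstream code typically
# moves tensors and modules onto this module-level device, so the same script
# runs on GPU when one is available and falls back to CPU otherwise, e.g.:
#
#   import torch
#   import flair
#   x = torch.zeros(2, 3).to(flair.device)  # cuda:0 if available, else cpu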
# global variable: embedding_storage_mode
embedding_storage_mode = "default"

from . import data
from . import models
from . import visual
from . import trainers
from . import nn
from .training_utils import AnnealOnPlateau

import logging.config

__version__ = "0.9"
logging.config.dictConfig(
    {
        "version": 1,
        "disable_existing_loggers": False,
        "formatters": {"standard": {"format": "%(asctime)-15s %(message)s"}},
        "handlers": {
            "console": {
                "level": "INFO",
                "class": "logging.StreamHandler",
                "formatter": "standard",
                "stream": "ext://sys.stdout",
            }
        },
        "loggers": {
            "flair": {"handlers": ["console"], "level": "INFO", "propagate": False}
        },
    }
)

logger = logging.getLogger("flair")
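# Illustrative sketch (not part of this module): because the "flair" logger is
# wired to a stdout handler at INFO level with propagate=False, library code
# can log through it without producing duplicate output on the root logger:
#
#   import logging
#   log = logging.getLogger("flair")
#   log.info("loading model ...")  # printed once to stdout by the console handler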
def set_seed(seed: int):
    hf_set_seed(seed)
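A minimal usage sketch for the set_seed helper above, assuming flair 0.9 is installed alongside transformers: it simply delegates to transformers.set_seed, which seeds Python's random module, NumPy, and PyTorch (including CUDA) in one call, so invoking it once before training makes runs reproducible.

    import flair

    flair.set_seed(42)  # seeds random, numpy and torch via transformers.set_seed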