Move ContentCache API to "cache.py" module

Author: Ivan Kravets
Date: 2020-08-22 20:05:14 +03:00
Commit: 7e4bfb1959 (parent abae9c7e77)
15 changed files with 215 additions and 192 deletions

================================================================

@@ -147,7 +147,6 @@ Share minimal diagnostics and usage information to help us make PlatformIO better
 It is enabled by default. For more information see:
 
 * `Telemetry Setting <https://docs.platformio.org/page/userguide/cmd_settings.html?utm_source=github&utm_medium=core#enable-telemetry>`_
-* `SSL Setting <https://docs.platformio.org/page/userguide/cmd_settings.html?utm_source=github&utm_medium=core#strict-ssl>`_
 
 License
 -------

================================================================

@@ -14,7 +14,6 @@
 from __future__ import absolute_import
 
-import codecs
 import getpass
 import hashlib
 import json
@@ -22,18 +21,12 @@ import os
 import platform
 import socket
 import uuid
-from os import environ, getenv, listdir, remove
 from os.path import dirname, isdir, isfile, join, realpath
-from time import time
 
 from platformio import __version__, exception, fs, proc
 from platformio.compat import WINDOWS, dump_json_to_unicode, hashlib_encode_data
 from platformio.package.lockfile import LockFile
-from platformio.project.helpers import (
-    get_default_projects_dir,
-    get_project_cache_dir,
-    get_project_core_dir,
-)
+from platformio.project.helpers import get_default_projects_dir, get_project_core_dir
 
 
 def projects_dir_validate(projects_dir):
@@ -63,10 +56,9 @@ DEFAULT_SETTINGS = {
         "value": 7,
     },
     "enable_cache": {
-        "description": "Enable caching for API requests and Library Manager",
+        "description": "Enable caching for HTTP API requests",
         "value": True,
     },
-    "strict_ssl": {"description": "Strict SSL for PlatformIO Services", "value": False},
     "enable_telemetry": {
         "description": ("Telemetry service <http://bit.ly/pio-telemetry> (Yes/No)"),
         "value": True,
@@ -173,146 +165,6 @@ class State(object):
         return item in self._storage
 
 
-class ContentCache(object):
-    def __init__(self, cache_dir=None):
-        self.cache_dir = None
-        self._db_path = None
-        self._lockfile = None
-
-        self.cache_dir = cache_dir or get_project_cache_dir()
-        self._db_path = join(self.cache_dir, "db.data")
-
-    def __enter__(self):
-        self.delete()
-        return self
-
-    def __exit__(self, type_, value, traceback):
-        pass
-
-    def _lock_dbindex(self):
-        if not self.cache_dir:
-            os.makedirs(self.cache_dir)
-        self._lockfile = LockFile(self.cache_dir)
-        try:
-            self._lockfile.acquire()
-        except:  # pylint: disable=bare-except
-            return False
-
-        return True
-
-    def _unlock_dbindex(self):
-        if self._lockfile:
-            self._lockfile.release()
-        return True
-
-    def get_cache_path(self, key):
-        assert "/" not in key and "\\" not in key
-        key = str(key)
-        assert len(key) > 3
-        return join(self.cache_dir, key[-2:], key)
-
-    @staticmethod
-    def key_from_args(*args):
-        h = hashlib.md5()
-        for arg in args:
-            if arg:
-                h.update(hashlib_encode_data(arg))
-        return h.hexdigest()
-
-    def get(self, key):
-        cache_path = self.get_cache_path(key)
-        if not isfile(cache_path):
-            return None
-        with codecs.open(cache_path, "rb", encoding="utf8") as fp:
-            return fp.read()
-
-    def set(self, key, data, valid):
-        if not get_setting("enable_cache"):
-            return False
-        cache_path = self.get_cache_path(key)
-        if isfile(cache_path):
-            self.delete(key)
-        if not data:
-            return False
-        if not isdir(self.cache_dir):
-            os.makedirs(self.cache_dir)
-        tdmap = {"s": 1, "m": 60, "h": 3600, "d": 86400}
-        assert valid.endswith(tuple(tdmap))
-        expire_time = int(time() + tdmap[valid[-1]] * int(valid[:-1]))
-
-        if not self._lock_dbindex():
-            return False
-
-        if not isdir(dirname(cache_path)):
-            os.makedirs(dirname(cache_path))
-        try:
-            with codecs.open(cache_path, "wb", encoding="utf8") as fp:
-                fp.write(data)
-            with open(self._db_path, "a") as fp:
-                fp.write("%s=%s\n" % (str(expire_time), cache_path))
-        except UnicodeError:
-            if isfile(cache_path):
-                try:
-                    remove(cache_path)
-                except OSError:
-                    pass
-
-        return self._unlock_dbindex()
-
-    def delete(self, keys=None):
-        """ Keys=None, delete expired items """
-        if not isfile(self._db_path):
-            return None
-        if not keys:
-            keys = []
-        if not isinstance(keys, list):
-            keys = [keys]
-        paths_for_delete = [self.get_cache_path(k) for k in keys]
-        found = False
-        newlines = []
-        with open(self._db_path) as fp:
-            for line in fp.readlines():
-                line = line.strip()
-                if "=" not in line:
-                    continue
-                expire, path = line.split("=")
-                try:
-                    if (
-                        time() < int(expire)
-                        and isfile(path)
-                        and path not in paths_for_delete
-                    ):
-                        newlines.append(line)
-                        continue
-                except ValueError:
-                    pass
-                found = True
-                if isfile(path):
-                    try:
-                        remove(path)
-                        if not listdir(dirname(path)):
-                            fs.rmtree(dirname(path))
-                    except OSError:
-                        pass
-
-        if found and self._lock_dbindex():
-            with open(self._db_path, "w") as fp:
-                fp.write("\n".join(newlines) + "\n")
-            self._unlock_dbindex()
-
-        return True
-
-    def clean(self):
-        if not self.cache_dir or not isdir(self.cache_dir):
-            return
-        fs.rmtree(self.cache_dir)
-
-
-def clean_cache():
-    with ContentCache() as cc:
-        cc.clean()
-
-
 def sanitize_setting(name, value):
     if name not in DEFAULT_SETTINGS:
         raise exception.InvalidSettingName(name)
@@ -350,8 +202,8 @@ def delete_state_item(name):
 
 def get_setting(name):
     _env_name = "PLATFORMIO_SETTING_%s" % name.upper()
-    if _env_name in environ:
-        return sanitize_setting(name, getenv(_env_name))
+    if _env_name in os.environ:
+        return sanitize_setting(name, os.getenv(_env_name))
 
     with State() as state:
         if "settings" in state and name in state["settings"]:
@@ -388,7 +240,7 @@ def is_disabled_progressbar():
         [
             get_session_var("force_option"),
             proc.is_ci(),
-            getenv("PLATFORMIO_DISABLE_PROGRESSBAR") == "true",
+            os.getenv("PLATFORMIO_DISABLE_PROGRESSBAR") == "true",
         ]
     )
@@ -401,15 +253,15 @@ def get_cid():
     if cid:
         return cid
     uid = None
-    if getenv("C9_UID"):
-        uid = getenv("C9_UID")
-    elif getenv("CHE_API", getenv("CHE_API_ENDPOINT")):
+    if os.getenv("C9_UID"):
+        uid = os.getenv("C9_UID")
+    elif os.getenv("CHE_API", os.getenv("CHE_API_ENDPOINT")):
         try:
             uid = json.loads(
                 fetch_remote_content(
                     "{api}/user?token={token}".format(
-                        api=getenv("CHE_API", getenv("CHE_API_ENDPOINT")),
-                        token=getenv("USER_TOKEN"),
+                        api=os.getenv("CHE_API", os.getenv("CHE_API_ENDPOINT")),
+                        token=os.getenv("USER_TOKEN"),
                     )
                 )
             ).get("id")
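
The get_setting() hunk above keeps the long-standing override order: a PLATFORMIO_SETTING_<NAME> environment variable wins over anything persisted in State. A minimal standalone sketch of that lookup (simplified: the real code also runs the value through sanitize_setting() and falls back to the saved state file):

    import os

    DEFAULT_SETTINGS = {"enable_cache": {"value": True}}

    def get_setting(name):
        env_name = "PLATFORMIO_SETTING_%s" % name.upper()
        if env_name in os.environ:
            return os.getenv(env_name)  # sanitize_setting() omitted in this sketch
        return DEFAULT_SETTINGS[name]["value"]

    os.environ["PLATFORMIO_SETTING_ENABLE_CACHE"] = "false"
    print(get_setting("enable_cache"))  # "false" (raw string before sanitizing)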

================================================================
platformio/cache.py (new file, 165 lines)

@@ -0,0 +1,165 @@
+# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import codecs
+import hashlib
+import os
+from time import time
+
+from platformio import app, fs
+from platformio.compat import hashlib_encode_data
+from platformio.package.lockfile import LockFile
+from platformio.project.helpers import get_project_cache_dir
+
+
+class ContentCache(object):
+    def __init__(self, namespace=None):
+        self.cache_dir = os.path.join(get_project_cache_dir(), namespace or "content")
+        self._db_path = os.path.join(self.cache_dir, "db.data")
+        self._lockfile = None
+        if not os.path.isdir(self.cache_dir):
+            os.makedirs(self.cache_dir)
+
+    def __enter__(self):
+        # cleanup obsolete items
+        self.delete()
+        return self
+
+    def __exit__(self, type_, value, traceback):
+        pass
+
+    @staticmethod
+    def key_from_args(*args):
+        h = hashlib.sha1()
+        for arg in args:
+            if arg:
+                h.update(hashlib_encode_data(arg))
+        return h.hexdigest()
+
+    def get_cache_path(self, key):
+        assert "/" not in key and "\\" not in key
+        key = str(key)
+        assert len(key) > 3
+        return os.path.join(self.cache_dir, key)
+
+    def get(self, key):
+        cache_path = self.get_cache_path(key)
+        if not os.path.isfile(cache_path):
+            return None
+        with codecs.open(cache_path, "rb", encoding="utf8") as fp:
+            return fp.read()
+
+    def set(self, key, data, valid):
+        if not app.get_setting("enable_cache"):
+            return False
+        cache_path = self.get_cache_path(key)
+        if os.path.isfile(cache_path):
+            self.delete(key)
+        if not data:
+            return False
+        tdmap = {"s": 1, "m": 60, "h": 3600, "d": 86400}
+        assert valid.endswith(tuple(tdmap))
+        expire_time = int(time() + tdmap[valid[-1]] * int(valid[:-1]))
+
+        if not self._lock_dbindex():
+            return False
+
+        if not os.path.isdir(os.path.dirname(cache_path)):
+            os.makedirs(os.path.dirname(cache_path))
+        try:
+            with codecs.open(cache_path, "wb", encoding="utf8") as fp:
+                fp.write(data)
+            with open(self._db_path, "a") as fp:
+                fp.write("%s=%s\n" % (str(expire_time), os.path.basename(cache_path)))
+        except UnicodeError:
+            if os.path.isfile(cache_path):
+                try:
+                    os.remove(cache_path)
+                except OSError:
+                    pass
+
+        return self._unlock_dbindex()
+
+    def delete(self, keys=None):
+        """ Keys=None, delete expired items """
+        if not os.path.isfile(self._db_path):
+            return None
+        if not keys:
+            keys = []
+        if not isinstance(keys, list):
+            keys = [keys]
+        paths_for_delete = [self.get_cache_path(k) for k in keys]
+        found = False
+        newlines = []
+        with open(self._db_path) as fp:
+            for line in fp.readlines():
+                line = line.strip()
+                if "=" not in line:
+                    continue
+                expire, fname = line.split("=")
+                path = os.path.join(self.cache_dir, fname)
+                try:
+                    if (
+                        time() < int(expire)
+                        and os.path.isfile(path)
+                        and path not in paths_for_delete
+                    ):
+                        newlines.append(line)
+                        continue
+                except ValueError:
+                    pass
+                found = True
+                if os.path.isfile(path):
+                    try:
+                        os.remove(path)
+                        if not os.listdir(os.path.dirname(path)):
+                            fs.rmtree(os.path.dirname(path))
+                    except OSError:
+                        pass
+
+        if found and self._lock_dbindex():
+            with open(self._db_path, "w") as fp:
+                fp.write("\n".join(newlines) + "\n")
+            self._unlock_dbindex()
+
+        return True
+
+    def clean(self):
+        if not os.path.isdir(self.cache_dir):
+            return
+        fs.rmtree(self.cache_dir)
+
+    def _lock_dbindex(self):
+        self._lockfile = LockFile(self.cache_dir)
+        try:
+            self._lockfile.acquire()
+        except:  # pylint: disable=bare-except
+            return False
+
+        return True
+
+    def _unlock_dbindex(self):
+        if self._lockfile:
+            self._lockfile.release()
+        return True
+
+
+#
+# Helpers
+#
+
+
+def cleanup_content_cache(namespace=None):
+    with ContentCache(namespace) as cc:
+        cc.clean()
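
The new module boils down to a namespaced key/value store with per-entry expiry. A hypothetical usage sketch, assuming PlatformIO Core is installed and its cache directory is writable (the key, payload, and namespace below are illustrative):

    from platformio.cache import ContentCache, cleanup_content_cache

    key = ContentCache.key_from_args("get", "/v2/frameworks", None, None)
    with ContentCache("http") as cc:  # entries live under <cache_dir>/http
        if cc.get(key) is None:
            cc.set(key, '{"frameworks": []}', "1d")  # keep for one day
        cached = cc.get(key)  # None if the "enable_cache" setting is off

    cleanup_content_cache("http")  # drop the whole namespace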

================================================================

@@ -153,9 +153,7 @@ class AccountClient(HTTPClient):  # pylint:disable=too-many-public-methods
         ).get("auth_token")
 
     def forgot_password(self, username):
-        return self.fetch_json_data(
-            "post", "/v1/forgot", data={"username": username},
-        )
+        return self.fetch_json_data("post", "/v1/forgot", data={"username": username},)
 
     def get_profile(self):
         return self.send_auth_request("get", "/v1/profile",)

================================================================

@@ -20,9 +20,9 @@ import requests.adapters
 from requests.packages.urllib3.util.retry import Retry  # pylint:disable=import-error
 
 from platformio import DEFAULT_REQUESTS_TIMEOUT, app, util
+from platformio.cache import ContentCache
 from platformio.exception import PlatformioException, UserSideException
 
 PING_REMOTE_HOSTS = [
     "140.82.118.3",  # Github.com
     "35.231.145.151",  # Gitlab.com
@@ -90,16 +90,20 @@ class HTTPClient(object):
         except (requests.exceptions.ConnectionError, requests.exceptions.Timeout) as e:
             raise HTTPClientError(str(e))
 
-    def fetch_json_data(self, *args, **kwargs):
+    def fetch_json_data(self, method, path, **kwargs):
         cache_valid = kwargs.pop("cache_valid") if "cache_valid" in kwargs else None
         if not cache_valid:
-            return self.raise_error_from_response(self.send_request(*args, **kwargs))
-        cache_key = app.ContentCache.key_from_args(*args, kwargs)
-        with app.ContentCache() as cc:
+            return self.raise_error_from_response(
+                self.send_request(method, path, **kwargs)
+            )
+        cache_key = ContentCache.key_from_args(
+            method, path, kwargs.get("params"), kwargs.get("data")
+        )
+        with ContentCache("http") as cc:
             result = cc.get(cache_key)
             if result is not None:
                 return json.loads(result)
-            response = self.send_request(*args, **kwargs)
+            response = self.send_request(method, path, **kwargs)
             cc.set(cache_key, response.text, cache_valid)
             return self.raise_error_from_response(response)
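
The cache_valid value accepted here (and by ContentCache.set()) is an integer with a one-letter unit suffix: s, m, h, or d. A standalone sketch of the conversion, mirroring the tdmap logic in cache.py:

    from time import time

    tdmap = {"s": 1, "m": 60, "h": 3600, "d": 86400}

    def expire_at(valid):
        assert valid.endswith(tuple(tdmap))
        return int(time() + tdmap[valid[-1]] * int(valid[:-1]))

    print(expire_at("1d") - int(time()))  # roughly 86400 seconds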

================================================================

@@ -26,7 +26,8 @@ from twisted.internet import reactor  # pylint: disable=import-error
 from twisted.internet import stdio  # pylint: disable=import-error
 from twisted.internet import task  # pylint: disable=import-error
 
-from platformio import app, fs, proc, telemetry, util
+from platformio import fs, proc, telemetry, util
+from platformio.cache import ContentCache
 from platformio.commands.debug import helpers
 from platformio.commands.debug.exception import DebugInvalidOptionsError
 from platformio.commands.debug.initcfgs import get_gdb_init_config
@@ -252,7 +253,7 @@ class GDBClient(BaseProcess):  # pylint: disable=too-many-instance-attributes
     def _kill_previous_session(self):
         assert self._session_id
         pid = None
-        with app.ContentCache() as cc:
+        with ContentCache() as cc:
             pid = cc.get(self._session_id)
             cc.delete(self._session_id)
         if not pid:
@@ -269,11 +270,11 @@ class GDBClient(BaseProcess):  # pylint: disable=too-many-instance-attributes
     def _lock_session(self, pid):
         if not self._session_id:
             return
-        with app.ContentCache() as cc:
+        with ContentCache() as cc:
             cc.set(self._session_id, str(pid), "1h")
 
     def _unlock_session(self):
         if not self._session_id:
             return
-        with app.ContentCache() as cc:
+        with ContentCache() as cc:
             cc.delete(self._session_id)
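
The three helpers above implement a PID lock with a one-hour lease, keyed by the debug session id. A condensed sketch of the pattern (session id and PID are illustrative; assumes PlatformIO Core is importable):

    from platformio.cache import ContentCache

    session_id = "gdb-session-example"  # hypothetical key: len > 3, no slashes
    with ContentCache() as cc:
        cc.set(session_id, str(12345), "1h")  # _lock_session: remember owner PID
        pid = cc.get(session_id)              # _kill_previous_session: read it back
        cc.delete(session_id)                 # _unlock_session: release the lock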

================================================================

@@ -17,15 +17,15 @@ import time
 
 from twisted.internet import defer, reactor  # pylint: disable=import-error
 
-from platformio import app
+from platformio.cache import ContentCache
 from platformio.commands.home.rpc.handlers.os import OSRPC
 
 
 class MiscRPC(object):
     def load_latest_tweets(self, data_url):
-        cache_key = app.ContentCache.key_from_args(data_url, "tweets")
+        cache_key = ContentCache.key_from_args(data_url, "tweets")
         cache_valid = "180d"
-        with app.ContentCache() as cc:
+        with ContentCache() as cc:
             cache_data = cc.get(cache_key)
             if cache_data:
                 cache_data = json.loads(cache_data)
@@ -43,7 +43,7 @@ class MiscRPC(object):
     @defer.inlineCallbacks
     def _preload_latest_tweets(data_url, cache_key, cache_valid):
         result = json.loads((yield OSRPC.fetch_content(data_url)))
-        with app.ContentCache() as cc:
+        with ContentCache() as cc:
             cc.set(
                 cache_key,
                 json.dumps({"time": int(time.time()), "result": result}),

================================================================

@@ -22,7 +22,8 @@ from functools import cmp_to_key
 import click
 from twisted.internet import defer  # pylint: disable=import-error
 
-from platformio import DEFAULT_REQUESTS_TIMEOUT, app, fs, util
+from platformio import DEFAULT_REQUESTS_TIMEOUT, fs, util
+from platformio.cache import ContentCache
 from platformio.clients.http import ensure_internet_on
 from platformio.commands.home import helpers
 from platformio.compat import PY2, get_filesystem_encoding, glob_recursive
@@ -40,8 +41,8 @@ class OSRPC(object):
                 "Safari/603.3.8"
             )
         }
-        cache_key = app.ContentCache.key_from_args(uri, data) if cache_valid else None
-        with app.ContentCache() as cc:
+        cache_key = ContentCache.key_from_args(uri, data) if cache_valid else None
+        with ContentCache() as cc:
             if cache_key:
                 result = cc.get(cache_key)
                 if result is not None:
@@ -63,7 +64,7 @@ class OSRPC(object):
             r.raise_for_status()
             result = r.text
             if cache_valid:
-                with app.ContentCache() as cc:
+                with ContentCache() as cc:
                     cc.set(cache_key, result, cache_valid)
             defer.returnValue(result)

================================================================

@@ -16,7 +16,8 @@ import os
 
 import click
 
-from platformio import app, util
+from platformio import util
+from platformio.cache import cleanup_content_cache
 from platformio.commands.boards import print_boards
 from platformio.compat import dump_json_to_unicode
 from platformio.package.manager.platform import PlatformPackageManager
@@ -191,7 +192,9 @@ def platform_search(query, json_output):
 def platform_frameworks(query, json_output):
     regclient = PlatformPackageManager().get_registry_client_instance()
     frameworks = []
-    for framework in regclient.fetch_json_data("get", "/v2/frameworks", cache_valid="1d"):
+    for framework in regclient.fetch_json_data(
+        "get", "/v2/frameworks", cache_valid="1d"
+    ):
         if query == "all":
             query = ""
         search_data = dump_json_to_unicode(framework)
@@ -401,7 +404,8 @@ def platform_update(  # pylint: disable=too-many-locals, too-many-arguments
         return click.echo(dump_json_to_unicode(result))
 
     # cleanup cached board and platform lists
-    app.clean_cache()
+    cleanup_content_cache("http")
     for platform in platforms:
         click.echo(
             "Platform %s"

================================================================

@@ -14,7 +14,7 @@
 
 import click
 
-from platformio import app
+from platformio.cache import cleanup_content_cache
 from platformio.commands.lib.command import CTX_META_STORAGE_DIRS_KEY
 from platformio.commands.lib.command import lib_update as cmd_lib_update
 from platformio.commands.platform import platform_update as cmd_platform_update
@@ -38,7 +38,7 @@ from platformio.package.manager.library import LibraryPackageManager
 @click.pass_context
 def cli(ctx, core_packages, only_check, dry_run):
     # cleanup lib search results, cached board and platform lists
-    app.clean_cache()
+    cleanup_content_cache("http")
 
     only_check = dry_run or only_check

================================================================

@@ -20,6 +20,7 @@ import click
 import semantic_version
 
 from platformio import __version__, app, exception, fs, telemetry, util
+from platformio.cache import cleanup_content_cache
 from platformio.clients import http
 from platformio.commands import PlatformioCLI
 from platformio.commands.lib.command import CTX_META_STORAGE_DIRS_KEY
@@ -160,7 +161,7 @@ def after_upgrade(ctx):
     else:
         click.secho("Please wait while upgrading PlatformIO...", fg="yellow")
         try:
-            app.clean_cache()
+            cleanup_content_cache("http")
         except:  # pylint: disable=bare-except
             pass

================================================================

@@ -27,7 +27,7 @@ class PackageManagerDownloadMixin(object):
 
     DOWNLOAD_CACHE_EXPIRE = 86400 * 30  # keep package in a local cache for 1 month
 
     def compute_download_path(self, *args):
-        request_hash = hashlib.new("sha256")
+        request_hash = hashlib.new("sha1")
         for arg in args:
             request_hash.update(compat.hashlib_encode_data(arg))
         dl_path = os.path.join(self.get_download_dir(), request_hash.hexdigest())
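
compute_download_path() hashes the request arguments into a stable file name, so a repeated download request maps to the same cached artifact. A simplified standalone sketch (plain .encode() stands in for compat.hashlib_encode_data):

    import hashlib
    import os

    def compute_download_path(download_dir, *args):
        request_hash = hashlib.new("sha1")  # this commit switches from sha256
        for arg in args:
            request_hash.update(arg.encode())
        return os.path.join(download_dir, request_hash.hexdigest())

    print(compute_download_path("/tmp/dl", "https://example.com/pkg.tar.gz", "1.2.3"))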

================================================================

@@ -16,10 +16,10 @@ import os
 
 import click
 
+from platformio.clients.http import ensure_internet_on
 from platformio.package.exception import UnknownPackageError
 from platformio.package.meta import PackageItem, PackageOutdatedResult, PackageSpec
 from platformio.package.vcsclient import VCSBaseException, VCSClientFactory
-from platformio.clients.http import ensure_internet_on
 
 
 class PackageManagerUpdateMixin(object):

================================================================

@@ -17,7 +17,7 @@ import os
 import subprocess
 import sys
 
-from platformio import __core_packages__, fs, exception, util
+from platformio import __core_packages__, exception, fs, util
 from platformio.compat import PY2
 from platformio.package.manager.tool import ToolPackageManager
 from platformio.package.meta import PackageSpec

================================================================

@@ -123,9 +123,7 @@ class MeasurementProtocol(TelemetryBase):
         caller_id = str(app.get_session_var("caller_id"))
         self["cd1"] = util.get_systype()
-        self["cd4"] = (
-            1 if (not is_ci() and (caller_id or not is_container())) else 0
-        )
+        self["cd4"] = 1 if (not is_ci() and (caller_id or not is_container())) else 0
         if caller_id:
             self["cd5"] = caller_id.lower()