Google Firebase SSL Certificate - My certificate has a large number of other websites listed

Problem: Other domains are listed in my Google Firebase SSL certificate.
I created a Firebase project to test Firebase authentication emails from Cloud Functions. The site is firebase.jhanley.com.
I have separate code that runs in Cloud Functions that validates SSL certificates for each domain that I own / manage (code below). The primary purpose of this code is to send an email when a domain's SSL certificate is about to expire. Some of our SSL certificates must be renewed manually.
The problem is that my code that checks the SSL certificate is returning a huge number of other domain names for my certificate. When I look at the SSL certificate with Chrome, I also see these other domain names. I do not want my site associated with these other sites.
A reduced list of the domains that I see in my SSL certificate for Firebase:
2r5consultoria.com.br
addlix.com
admin.migrationcover.ae
admin.thermoply.com
akut-med.zeitnachweise.de
...
firebase.jhanley.com
...
Q) Why is this happening with Firebase SSL and is there a solution?
Q) Does Firebase support installing your own SSL certificate?
Below is the Python 3.x code that runs in Cloud Functions and processes SSL certificates by connecting to each domain name in a list.
Note: This code does not have any (known) problems. I am including the source code to create added value for others in the community.
""" Routines to process SSL certificates """
import sys
import datetime
import socket
import ssl
import time
import myhtml
g_email_required = False # This is set during processing if a warning or error was detected
def get_email_requred():
return g_email_required
def ssl_get_cert(hostname):
""" This function returns an SSL certificate from a host """
context = ssl.create_default_context()
conn = context.wrap_socket(
socket.socket(socket.AF_INET),
server_hostname=hostname)
# 3 second timeout because Google Cloud Functions has runtime limitations
conn.settimeout(3.0)
try:
conn.connect((hostname, 443))
except Exception as ex:
print("{}: Exception: {}".format(hostname, ex), file=sys.stderr)
return False, str(ex)
host_ssl_info = conn.getpeercert()
return host_ssl_info, ''
def get_ssl_info(host):
    """ This function retrieves the SSL certificate for host """
    # If we receive an error, retry up to three times, waiting 10 seconds each time.
    retry = 0
    err = ''
    while retry < 3:
        ssl_info, err = ssl_get_cert(host)
        if ssl_info is not False:
            return ssl_info, ''
        retry += 1
        print(' retrying ...')
        time.sleep(10)
    return False, err
def get_ssl_issuer_name(ssl_info):
    """ Return the IssuerName from the SSL certificate """
    issuerName = ''
    issuer = ssl_info['issuer']
    # pylint: disable=line-too-long
    # issuer is a tuple of tuples of key / value pair tuples and looks like this:
    # ((('countryName', 'US'),), (('organizationName', "Let's Encrypt"),), (('commonName', "Let's Encrypt Authority X3"),))
    for item in issuer:
        # item looks like this as we iterate over the issuer tuple:
        #
        # (('countryName', 'US'),)
        # (('organizationName', "Let's Encrypt"),)
        # (('commonName', "Let's Encrypt Authority X3"),)
        s = item[0]
        # s is the inner tuple:
        #
        # ('countryName', 'US')
        # ('organizationName', "Let's Encrypt")
        # ('commonName', "Let's Encrypt Authority X3")

        # break the tuple into "key" and "value" pairs
        k = s[0]
        v = s[1]
        if k == 'organizationName':
            if v != '':
                issuerName = v
            continue
        if k == 'commonName':
            if v != '':
                issuerName = v
    return issuerName
def get_ssl_subject_alt_names(ssl_info):
    """ Return the Subject Alt Names """
    altNames = ''
    subjectAltNames = ssl_info['subjectAltName']
    index = 0
    for item in subjectAltNames:
        altNames += item[1]
        index += 1
        if index < len(subjectAltNames):
            altNames += ', '
    return altNames
def process_hostnames(msg_body, hostnames, days_left):
    """ Process the SSL certificate for each hostname """
    # pylint: disable=global-statement
    global g_email_required

    ssl_date_fmt = r'%b %d %H:%M:%S %Y %Z'

    for host in hostnames:
        f_expired = False
        print('Processing host:', host)
        ssl_info, err = get_ssl_info(host)
        if ssl_info is False:
            msg_body = myhtml.add_row(msg_body, host, err, '', '', '', True)
            g_email_required = True
            continue
        #print(ssl_info)
        issuerName = get_ssl_issuer_name(ssl_info)
        altNames = get_ssl_subject_alt_names(ssl_info)
        l_expires = datetime.datetime.strptime(ssl_info['notAfter'], ssl_date_fmt)
        remaining = l_expires - datetime.datetime.utcnow()
        if remaining < datetime.timedelta(days=0):
            # cert has already expired - uhoh!
            cert_status = "Expired"
            f_expired = True
            g_email_required = True
        elif remaining < datetime.timedelta(days=days_left):
            # expires sooner than the buffer
            cert_status = "Time to Renew"
            f_expired = True
            g_email_required = True
        else:
            # everything is fine
            cert_status = "OK"
            f_expired = False
        msg_body = myhtml.add_row(msg_body, host, cert_status, str(l_expires), issuerName, altNames, f_expired)
    return msg_body
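The functions above are meant to run inside Cloud Functions, but they can be exercised locally. Below is a minimal, hypothetical driver (the module name ssl_checks and the host list are my own; it sidesteps the author's private myhtml module by calling the lower-level helpers directly):

# Hypothetical local driver for the routines above, assuming they are saved
# as ssl_checks.py and that a stub myhtml module is importable (the import
# at the top of the file requires it, even though add_row is not called here).
import datetime
import ssl_checks

for host in ['firebase.jhanley.com', 'www.google.com']:
    ssl_info, err = ssl_checks.get_ssl_info(host)
    if ssl_info is False:
        print('{}: error: {}'.format(host, err))
        continue
    expires = datetime.datetime.strptime(ssl_info['notAfter'], r'%b %d %H:%M:%S %Y %Z')
    print('{}: issuer={}, expires={}'.format(
        host, ssl_checks.get_ssl_issuer_name(ssl_info), expires))
    print('  SANs: {}'.format(ssl_checks.get_ssl_subject_alt_names(ssl_info)))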

This is happening because Firebase will automatically create shared certificates for customers. This does not represent a security risk for your site, as Firebase retains full control of the certificate private keys. Certificates are shared to allow us to offer HTTPS + custom domains without an additional fee for our free plan customers.
If you are on the Blaze (pay-as-you-go) plan for your project, you can send a request to Firebase support and we can migrate you to a dedicated certificate. This is only available for Blaze plan customers.
Firebase Hosting does not currently support uploading custom certificates. If this is a use case that's important to you, I'd recommend filing a feature request (again, through Firebase support) so that we can evaluate it for future improvements to the product.
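If you want to see the shared certificate's SAN list for yourself, a minimal standard-library check (the same approach the question's code takes) prints every domain on the certificate:

# Minimal sketch: print every Subject Alternative Name on a host's certificate.
import socket
import ssl

hostname = 'firebase.jhanley.com'
context = ssl.create_default_context()
with socket.create_connection((hostname, 443), timeout=5) as sock:
    with context.wrap_socket(sock, server_hostname=hostname) as conn:
        cert = conn.getpeercert()

# subjectAltName is a tuple of ('DNS', name) pairs
for _, name in cert['subjectAltName']:
    print(name)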

Related

How to fetch the view ID from Google Analytics by giving the access_token using Python

After a successful login from the consent screen, I am getting the access_token. The next step is to fetch all the view IDs from the Google Analytics account. Please help me out.
Example: This is the access_token("ya29.A0ARrdaM8IvLg8jjVHWgxneSp_mxgFYHpKt4LwPGZEVqzOphMA2Cll6mjMxlQRFanbJHh1WrBEYVe2Y1BvBU6j7h_17nVeY4h-FWdUuv5bo0rzETTz_-xw4t5ZNBYpj26Cy3u4Y1trZnqVIA4")
You should check the Management API quickstart for Python:
"""A simple example of how to access the Google Analytics API."""
import argparse
from apiclient.discovery import build
import httplib2
from oauth2client import client
from oauth2client import file
from oauth2client import tools
def get_service(api_name, api_version, scope, client_secrets_path):
"""Get a service that communicates to a Google API.
Args:
api_name: string The name of the api to connect to.
api_version: string The api version to connect to.
scope: A list of strings representing the auth scopes to authorize for the
connection.
client_secrets_path: string A path to a valid client secrets file.
Returns:
A service that is connected to the specified API.
"""
# Parse command-line arguments.
parser = argparse.ArgumentParser(
formatter_class=argparse.RawDescriptionHelpFormatter,
parents=[tools.argparser])
flags = parser.parse_args([])
# Set up a Flow object to be used if we need to authenticate.
flow = client.flow_from_clientsecrets(
client_secrets_path, scope=scope,
message=tools.message_if_missing(client_secrets_path))
# Prepare credentials, and authorize HTTP object with them.
# If the credentials don't exist or are invalid run through the native client
# flow. The Storage object will ensure that if successful the good
# credentials will get written back to a file.
storage = file.Storage(api_name + '.dat')
credentials = storage.get()
if credentials is None or credentials.invalid:
credentials = tools.run_flow(flow, storage, flags)
http = credentials.authorize(http=httplib2.Http())
# Build the service object.
service = build(api_name, api_version, http=http)
return service
def get_first_profile_id(service):
    # Use the Analytics service object to get the first profile id.

    # Get a list of all Google Analytics accounts for the authorized user.
    accounts = service.management().accounts().list().execute()

    if accounts.get('items'):
        # Get the first Google Analytics account.
        account = accounts.get('items')[0].get('id')

        # Get a list of all the properties for the first account.
        properties = service.management().webproperties().list(
            accountId=account).execute()

        if properties.get('items'):
            # Get the first property id.
            property = properties.get('items')[0].get('id')

            # Get a list of all views (profiles) for the first property.
            profiles = service.management().profiles().list(
                accountId=account,
                webPropertyId=property).execute()

            if profiles.get('items'):
                # return the first view (profile) id.
                return profiles.get('items')[0].get('id')

    return None

def get_results(service, profile_id):
    # Use the Analytics Service Object to query the Core Reporting API
    # for the number of sessions in the past seven days.
    return service.data().ga().get(
        ids='ga:' + profile_id,
        start_date='7daysAgo',
        end_date='today',
        metrics='ga:sessions').execute()
def print_results(results):
    # Print data nicely for the user.
    if results:
        print('View (Profile): %s' % results.get('profileInfo').get('profileName'))
        print('Total Sessions: %s' % results.get('rows')[0][0])
    else:
        print('No results found')

def main():
    # Define the auth scopes to request.
    scope = ['https://www.googleapis.com/auth/analytics.readonly']

    # Authenticate and construct service.
    service = get_service('analytics', 'v3', scope, 'client_secrets.json')
    profile = get_first_profile_id(service)
    print_results(get_results(service, profile))

if __name__ == '__main__':
    main()
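Since the goal is to fetch all of the view IDs rather than just the first one, a variation on the quickstart can list every view in a single call. This sketch assumes `service` was built as above and relies on the Management API v3 wildcard '~all':

# Sketch: list every view (profile) id the authorized user can access.
# '~all' is the Management API v3 wildcard for all accounts / web properties.
def get_all_profile_ids(service):
    profiles = service.management().profiles().list(
        accountId='~all',
        webPropertyId='~all').execute()
    return [item.get('id') for item in profiles.get('items', [])]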

FastAPI auth with OAuth2PasswordBearer: how to check if a user is connected without raising an exception?

For an application, I have followed the FastAPI documentation for the authentication process.
By default, OAuth2PasswordBearer raises an HTTPException with status code 401, so I can't check whether a user is actually connected without returning a 401 error to the client.
An example of what I want to do:
app = FastAPI()
oauth2_scheme = OAuth2PasswordBearer(tokenUrl="api/users/token")

def get_current_user(token: str = Depends(oauth2_scheme)):
    try:
        settings = get_settings()
        payload = jwt.decode(token, settings.secret_key,
                             algorithms=[settings.algorithm_hash])
        email = payload.get("email")
        if email is None:
            raise credentials_exception
        token_data = TokenData(email=email)
    except jwt.JWTError:
        raise credentials_exception
    user = UserNode.get_node_with_email(token_data.email)
    if user is None:
        raise credentials_exception
    return user

@app.get('/')
def is_connected(user = Depends(get_current_user)):
    # here, I can't do anything if the user is not connected,
    # because an exception is raised in the OAuth2PasswordBearer __call__ method ...
    return
I see the OAuth2PasswordBearer class has an "auto_error" attribute, which controls whether the function returns None or raises an error:
if not authorization or scheme.lower() != "bearer":
    if self.auto_error:
        raise HTTPException(
            status_code=HTTP_401_UNAUTHORIZED,
            detail="Not authenticated",
            headers={"WWW-Authenticate": "Bearer"},
        )
    else:
        return None
So I came up with a workaround:
app = FastAPI()
oauth2_scheme = OAuth2PasswordBearer(tokenUrl="api/users/token", auto_error=False)

def get_current_user(token: str = Depends(oauth2_scheme)):
    if not token:
        return None
    # [ ... same token decoding logic as before ... ]
    return user

@app.get('/')
def is_connected(user = Depends(get_current_user)):
    return user
It works, but I wonder what other ways there are to do this. Is there a more "official" method?
This is a good question and as far as I know, there isn't an "official" answer that is universally agreed upon.
The approach I've seen most often in the FastAPI applications that I've reviewed involves creating multiple dependencies for each use case.
While the code works similarly to the example you've provided, the key difference is that it attempts to parse the JWT every time - and doesn't only raise the credentials exception when it does not exist. Make sure the dependency accounts for malformed JWTs, invalid JWTs, etc.
Here's an example adapted to the general structure you've specified:
# ...other code

oauth2_scheme = OAuth2PasswordBearer(
    tokenUrl="api/users/token",
    auto_error=False
)
auth_service = AuthService()  # service responsible for JWT management

async def get_user_from_token(
    token: str = Depends(oauth2_scheme),
    user_node: UserNode = Depends(get_user_node),
) -> Optional[User]:
    try:
        email = auth_service.get_email_from_token(
            token=token,
            secret_key=config.SECRET_KEY
        )
        user = await user_node.get_node_with_email(email)
        return user
    except Exception:
        # exceptions may include no token, expired JWT, malformed JWT,
        # or database errors - either way we ignore them and return None
        return None

def get_current_user_required(
    user: Optional[User] = Depends(get_user_from_token)
) -> Optional[User]:
    if not user:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="An authenticated user is required for that action.",
            headers={"WWW-Authenticate": "Bearer"},
        )
    return user

def get_current_user_optional(
    user: Optional[User] = Depends(get_user_from_token)
) -> Optional[User]:
    return user
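Route handlers then pick whichever dependency matches their needs. The endpoints below are hypothetical, just to show how the two dependencies behave:

# Illustrative usage of the two dependencies above (paths are hypothetical).
@app.get("/me")
def read_me(user: User = Depends(get_current_user_required)):
    # a 401 is raised automatically when no valid token was sent
    return {"email": user.email}

@app.get("/greeting")
def greeting(user: Optional[User] = Depends(get_current_user_optional)):
    # user is None for anonymous visitors instead of an exception
    return {"message": "Hello, " + user.email if user else "Hello, guest"}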

How to whitelist an API path in FastAPI after adding an API key for the entire app?

I have created an app as follows:
X_API_KEY = APIKeyHeader(name='X-API-Key')

def validate_api_key(x_api_key: str = Depends(X_API_KEY)):
    if x_api_key == ENV_API_KEY:
        return True
    raise HTTPException(
        status_code=status.HTTP_401_UNAUTHORIZED,
        detail="Invalid API Key",
    )

app = FastAPI(
    title="My boring app",
    version=APP_VERSION,
    dependencies=[Security(validate_api_key)],
    root_path="/api/v1"
)

@app.get("/secretdata")
def secretdata() -> dict:
    return 'data'

@app.get("/")
def is_alive() -> dict:
    return True
How can I whitelist the '/' path from security (api key)?
One way of achieving this is to split your application into multiple routers as shown in the example for bigger applications in the FastAPI documentation.
Here's an example fitting your case:
# add import
from fastapi import APIRouter

X_API_KEY = APIKeyHeader(name='X-API-Key')

def validate_api_key(x_api_key: str = Depends(X_API_KEY)):
    if x_api_key == ENV_API_KEY:
        return True
    raise HTTPException(
        status_code=status.HTTP_401_UNAUTHORIZED,
        detail="Invalid API Key",
    )

app = FastAPI(
    title="My boring app",
    version=APP_VERSION,
    # removed global dependency
    root_path="/api/v1"
)

# generate new routers
protected_router = APIRouter()
unprotected_router = APIRouter()

# use respective router
@protected_router.get("/secretdata")
def secretdata() -> dict:
    return 'data'

@unprotected_router.get("/")
def is_alive() -> dict:
    return True

# include the routers in the application, and add dependencies where needed
app.include_router(protected_router, dependencies=[Security(validate_api_key)])
# note: no dependency for this one
app.include_router(unprotected_router)
For this to be a bit cleaner you would usually split these routers into separate files, as shown in the previously mentioned documentation!
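If splitting into routers feels heavy for a two-route app, an alternative sketch (my own suggestion, not from the original answer) is to drop the global dependency and attach it per route instead:

# Alternative sketch: per-route dependencies instead of separate routers.
app = FastAPI(title="My boring app", version=APP_VERSION, root_path="/api/v1")

@app.get("/secretdata", dependencies=[Security(validate_api_key)])
def secretdata() -> dict:
    return {'data': 'secret'}

@app.get("/")  # no dependency: this path stays whitelisted
def is_alive() -> dict:
    return {'alive': True}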

How to disable "check_hostname" using Requests library and Python 3.8.5?

Using the latest Requests library and Python 3.8.5, I can't seem to disable certificate checking on my API call. I understand the reasons not to disable it, but I'd like this to work.
When I attempt to use verify=True, the servers I connect to throw this error:
(Caused by SSLError(SSLCertVerificationError(1, '[SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: unable to get local issuer certificate (_ssl.c:1123)')))
When I attempt to use verify=False, I get:
Error making PS request to [<redacted server name>] at URL https://<redacted server name/rest/v2/api_endpoint: Cannot set verify_mode to CERT_NONE when check_hostname is enabled.
I don't know how to also disable "check_hostname" as I haven't seen a way to do that with the requests library (which I plan to keep and use).
My code:
self.ps_server = server
self.ps_base_url = 'https://{}/rest/v2/'.format(self.ps_server)
url = self.ps_base_url + endpoint
response = None
try:
    if req_type == 'POST':
        response = requests.post(url, json=post_data, auth=(self.ps_username, self.ps_password), verify=self.verify, timeout=60)
        return json.loads(response.text)
    elif req_type == 'GET':
        response = requests.get(url, auth=(self.ps_username, self.ps_password), verify=self.verify, timeout=60)
        if response.status_code == 200:
            return json.loads(response.text)
        else:
            logging.error("Error making PS request to [{}] at URL {} [{}]".format(server, url, response.status_code))
            return {'status': 'error', 'trace': '{} - {}'.format(response.text, response.status_code)}
    elif req_type == 'DELETE':
        response = requests.delete(url, auth=(self.ps_username, self.ps_password), verify=self.verify, timeout=60)
        return response.text
    elif req_type == 'PUT':
        response = requests.put(url, json=post_data, auth=(self.ps_username, self.ps_password), verify=self.verify, timeout=60)
        return response.text
except Exception as e:
    logging.error("Error making PS request to [{}] at URL {}: {}".format(server, url, e))
    return {'status': 'error', 'trace': '{}'.format(e)}
Can someone shed some light on how I can disable check_hostname as well, so that I can test this without SSL checking?
If you have pip-system-certs, it monkey-patches requests as well. Here's a link to the code: https://gitlab.com/alelec/pip-system-certs/-/blob/master/pip_system_certs/wrapt_requests.py
After digging through the requests and urllib3 source for a while, this is the culprit in pip-system-certs:
ssl_context = ssl.create_default_context()
ssl_context.load_default_certs()
kwargs['ssl_context'] = ssl_context
That kwargs dict is later used to build a urllib3 connection pool, and the ssl_context it injects has .check_hostname set to True.
As far as replacing the utility of the pip-system-certs package, I think forking it and making it only monkey-patch pip would be the right way forward. That or just adding --trusted-host args to any pip install commands.
EDIT:
Here's how it's normally initialized through requests (versions I'm using):
https://github.com/psf/requests/blob/v2.21.0/requests/adapters.py#L163
def init_poolmanager(self, connections, maxsize, block=DEFAULT_POOLBLOCK, **pool_kwargs):
    """Initializes a urllib3 PoolManager.

    This method should not be called from user code, and is only
    exposed for use when subclassing the
    :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

    :param connections: The number of urllib3 connection pools to cache.
    :param maxsize: The maximum number of connections to save in the pool.
    :param block: Block when no free connections are available.
    :param pool_kwargs: Extra keyword arguments used to initialize the Pool Manager.
    """
    # save these values for pickling
    self._pool_connections = connections
    self._pool_maxsize = maxsize
    self._pool_block = block

    # NOTE: pool_kwargs doesn't have ssl_context in it
    self.poolmanager = PoolManager(num_pools=connections, maxsize=maxsize,
                                   block=block, strict=True, **pool_kwargs)
And here's how it's monkey-patched:
def init_poolmanager(self, *args, **kwargs):
    import ssl
    ssl_context = ssl.create_default_context()
    ssl_context.load_default_certs()
    kwargs['ssl_context'] = ssl_context
    return super(SslContextHttpAdapter, self).init_poolmanager(*args, **kwargs)
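To answer the question directly: the same adapter hook can be used to mount a context with check_hostname turned off. This is a minimal sketch for testing only, since it disables certificate verification entirely; the URL is a placeholder:

# Sketch: mount an adapter whose SSLContext disables hostname checking.
# For testing only - this turns off certificate verification entirely.
import ssl
import requests
from requests.adapters import HTTPAdapter

class InsecureAdapter(HTTPAdapter):
    def init_poolmanager(self, *args, **kwargs):
        ctx = ssl.create_default_context()
        ctx.check_hostname = False        # must be disabled before setting CERT_NONE
        ctx.verify_mode = ssl.CERT_NONE   # skip certificate validation
        kwargs['ssl_context'] = ctx
        return super().init_poolmanager(*args, **kwargs)

session = requests.Session()
session.mount('https://', InsecureAdapter())
response = session.get('https://server.example/rest/v2/api_endpoint', verify=False)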

Qt WebEngine set socks5 proxy

I want to set a SOCKS5 proxy for my Qt WebEngine app. I use PyQt 5.8 and Qt 5.8.
I set up a SOCKS5 server with danted v1.4.1. I tested the server and it worked well. But when I use it in my app, danted logs this error:
error after reading 3 bytes in 0 seconds: client offered no acceptable authentication method
This is my code:
def set_proxy():
    from PyQt5.QtNetwork import QNetworkProxy
    from six.moves.urllib import parse as urlparse

    string_proxy = "socks5://username:password@ip:port"
    urlinfo = urlparse.urlparse(string_proxy)

    proxy = QNetworkProxy()
    if urlinfo.scheme == 'socks5':
        proxy.setType(QNetworkProxy.Socks5Proxy)
    else:
        proxy.setType(QNetworkProxy.NoProxy)
    if urlinfo.hostname != None:
        proxy.setHostName(urlinfo.hostname)
    if urlinfo.port != None:
        proxy.setPort(urlinfo.port)
    if urlinfo.username != None:
        proxy.setUser(urlinfo.username)
    else:
        proxy.setUser('')
    if urlinfo.password != None:
        proxy.setPassword(urlinfo.password)
    else:
        proxy.setPassword('')
    QNetworkProxy.setApplicationProxy(proxy)
Can anyone help me?
Update on 2017/03/29: added the proxyAuthenticationRequired signal.
def set_proxy(string_proxy):
    proxy = QNetworkProxy()
    urlinfo = urlparse.urlparse(string_proxy)
    if urlinfo.scheme == 'socks5':
        proxy.setType(QNetworkProxy.Socks5Proxy)
    elif urlinfo.scheme == 'http':
        proxy.setType(QNetworkProxy.HttpProxy)
    else:
        proxy.setType(QNetworkProxy.NoProxy)
    proxy.setHostName(urlinfo.hostname)
    proxy.setPort(urlinfo.port)
    proxy.setUser(urlinfo.username)
    proxy.setPassword(urlinfo.password)
    QNetworkProxy.setApplicationProxy(proxy)

def handleProxyAuthReq(url, auth, proxyhost):
    auth.setUser(username)
    auth.setPassword(password)

webView = QtWebEngineWidgets.QWebEngineView()
#proxy_string = "http://username:password@ip:port"
proxy_string = "socks5://username:password@ip:port"
set_proxy(proxy_string)
webView.page().proxyAuthenticationRequired.connect(handleProxyAuthReq)
I tested it with my HTTP proxy and it worked. But when I use the SOCKS5 proxy, the proxyAuthenticationRequired signal is not emitted.
QtWebEngine does not handle the username/password information from QNetworkProxy:
All other proxy settings such as QNetworkProxy::rawHeader(), QNetworkProxy::user(), or QNetworkProxy::password() are ignored.
You'll need to handle proxyAuthenticationRequired and handle authentication there.
Update on 2017/03/30: It looks like Chromium does not support authentication with SOCKS proxies.
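If the proxy can be run without authentication, one workaround (my own suggestion, not part of the original answer) is to hand the proxy to the underlying Chromium through its standard --proxy-server switch before Qt starts:

# Sketch: route Qt WebEngine through an unauthenticated SOCKS5 proxy using
# Chromium's --proxy-server switch. The environment variable must be set
# before QApplication is created; host and port are placeholders.
import os
import sys

os.environ['QTWEBENGINE_CHROMIUM_FLAGS'] = '--proxy-server=socks5://127.0.0.1:1080'

from PyQt5 import QtCore, QtWidgets, QtWebEngineWidgets

app = QtWidgets.QApplication(sys.argv)
view = QtWebEngineWidgets.QWebEngineView()
view.load(QtCore.QUrl('https://example.com'))
view.show()
sys.exit(app.exec_())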
