Related
I am new to Deno and have the following simple code...
// Fetch the latest COP exchange rates and return them as a parsed object.
// FIX: the original returned `resp.body`, which is a ReadableStream, not the
// decoded JSON — `resp.json()` reads and parses the stream, which is what the
// caller needs to access `.rates`.
// `apiKey` is expected to be defined in an enclosing scope.
const getCOP = async () => {
  const resp = await fetch("....", {
    headers: {
      accept: "application/json",
      apiKey,
    },
  });
  // Parse the response body stream into a plain object.
  return await resp.json();
};
let resp = {}
return new Response(resp.body, {
status: resp.status,
headers: {
"content-type": "application/json",
},
});
resp.body = await getCOP();
resp.status = 200;
It returns
{
"success": true,
"timestamp": 1675621083,
"base": "COP",
"date": "2023-02-05",
"rates": {
"EUR": 0.000199,
"GBP": 0.000179,
"USD": 0.000216
}
}
What I would like to do would be the equivalent of this in normal JS...
return {
rate : resp.body.rates,
copPerDollar : 1 / resp.body.rates.USD
}
Of course this doesn't work, because instead of an actual JSON object it is a readable stream. How do I transform this stream into a JSON object and then re-stream it into the body of the response I send back?
I have a table that has more than 25 items and wrote a basic script to break them into sub-arrays of 25 items each, then loop through that collection of sub-arrays to run a BatchWriteItem command with the AWS DynamoDB client. The issue I am getting is a returned validation error. When I run the same seed file via the aws-cli it seeds the table perfectly. This makes me think it has something to do with my script. See anything I am missing? Thanks in advance!
// AWS SDK v2 DocumentClient pointed at a local DynamoDB instance.
// NOTE(review): DocumentClient marshals plain JS values itself; if
// allItems.json contains DynamoDB-typed attributes (e.g. {"id": {"S": "..."}}
// as a CLI seed file does), batchWrite will fail with "Invalid attribute
// value type" — use the low-level `new DynamoDB()` client for that format.
var { DynamoDB } = require('aws-sdk');
var db = new DynamoDB.DocumentClient({
  region: 'localhost',
  endpoint: 'http://localhost:8000',
});
// Seed data and target table.
const allItems = require('./allItems.json');
const tableName = 'some-table-name';
console.log({ tableName, allItems });
// Split allItems into batches of at most 25 items (BatchWriteItem's limit).
// BUG FIX: the original incremented a `count` that was never reset, so
// `count === 25` was true exactly once — every item after #25 accumulated
// into a single oversized final batch. Use the batch's own length instead.
var batches = [];
var currentBatch = [];
for (let i = 0; i < allItems.length; i++) {
  // push item to the current batch
  currentBatch.push(allItems[i]);
  if (currentBatch.length === 25) {
    batches.push(currentBatch);
    currentBatch = [];
  }
}
// if there are still items left in the current batch, add it to the collection
if (currentBatch.length > 0) {
  batches.push(currentBatch);
}
var completedRequests = 0;
var errors = false;

// Factory for the db.batchWrite callback. The outer (err, data) parameters
// are unused but kept for call-site compatibility; the real callback is the
// returned function.
function requestHandler(err, data) {
  console.log('In the request handler...');
  return function (err, data) {
    completedRequests++;
    // BUG FIX: the original latched `errors` (errors = errors ? true : err)
    // and then treated every LATER successful callback as an error, logging
    // "ERROR: null" and never building `res`. Branch on the per-call err;
    // `errors` only records that at least one request failed.
    if (err) {
      errors = true;
      console.error('Request caused a DB error.');
      console.error('ERROR: ' + err);
      console.error(JSON.stringify(err, null, 2));
    } else {
      var res = {
        statusCode: 200,
        headers: {
          'Content-Type': 'application/json',
          'Access-Control-Allow-Methods': 'GET,POST,OPTIONS',
          'Access-Control-Allow-Origin': '*',
          'Access-Control-Allow-Credentials': true,
        },
        body: JSON.stringify(data),
        isBase64Encoded: false,
      };
      console.log(`Success: returned ${data}`);
      return res;
    }
    // Once every batch has called back, report whether any failed.
    if (completedRequests == batches.length) {
      return errors;
    }
  };
}
// Make one batchWrite request per batch.
for (let j = 0; j < batches.length; j++) {
  // items go in params.RequestItems[tableName]; each item is shaped
  // {PutRequest: {Item: ITEM_OBJECT}}. Build the object directly instead of
  // round-tripping a string through JSON.parse.
  const params = {
    RequestItems: {
      [tableName]: batches[j],
    },
  };
  console.log('before db.batchWriteItem: ', params);
  try {
    // send to db
    db.batchWrite(params, requestHandler(params));
  } catch (err) {
    // BUG FIX: the original used an optional catch binding (`catch {`) but
    // still referenced `err`, which would itself throw a ReferenceError.
    console.error(err);
  }
}
Here is the formatted request object and the error:
before db.batchWriteItem:
{ RequestItems:
{ 'some-table-name': [ [Object], [Object], [Object], [Object] ] }
}
In the request handler...
Request caused a DB error.
ERROR: ValidationException: Invalid attribute value type
{
"message": "Invalid attribute value type",
"code": "ValidationException",
"time": "2020-08-04T10:51:13.751Z",
"requestId": "dd49628c-6ee9-4275-9349-6edca29636fd",
"statusCode": 400,
"retryable": false,
"retryDelay": 47.94198279972915
}
You are using the DocumentClient in the nodejs code. This will automatically convert the data format used by DynamoDB to a more easily consumable format.
e.g.
{
"id": {
"S": "A string value"
}
}
would become
{
"id": "A string value"
}
The CLI does not perform this data conversion.
You can use the regular DynamoDB client to avoid this conversion in Node.js, e.g. const db = new DynamoDB()
I try to describe my asp.net Web API OAuth endpoint in swagger using Swashbuckle 5.6.0 and tried this solution:
How to show WebApi OAuth token endpoint in Swagger
My problem is, that the URL of receiving an access token and getting a new one by refresh token is the same in asp.net OAuth Authorization Server.
Adding the second URL to the Swagger Document Path fails due to the fact, that "paths" is a IDictionary<string, PathItem>.
/// <summary>
/// Document filter that adds the OAuth token endpoint (access-token and
/// refresh-token grants) to the generated Swagger document.
/// </summary>
public class AuthTokenOperation : IDocumentFilter
{
    public void Apply(SwaggerDocument swaggerDoc, SchemaRegistry schemaRegistry, IApiExplorer apiExplorer)
    {
        // get the Token Endpoint from Config
        var endpoint = Helpers.GetAppSetting("TokenEndPoint");

        // "paths" is an IDictionary<string, PathItem>, so the same endpoint
        // cannot be added twice. Append a fragment to keep the keys unique;
        // HTTP clients ignore the fragment, so both operations still hit the
        // same URL.
        // Access Token
        swaggerDoc.paths.Add(endpoint + "#AccessToken", new PathItem
        {
            post = new Operation
            {
                tags = new List<string> { "AccessToken" },
                // FIX: the standard media type is "application/x-www-form-urlencoded"
                // (the original said "...form-url-encoded").
                consumes = new string[] { "application/x-www-form-urlencoded" },
                produces = new string[] { "application/json" },
                parameters = new List<Parameter>
                {
                    // "in" is a C# keyword, so the model property is written @in.
                    new Parameter { type = "string", name = "username", required = true, @in = "formData" },
                    new Parameter { type = "string", name = "password", required = true, @in = "formData" },
                    new Parameter { type = "string", name = "grant_type", required = true, @in = "formData" },
                    new Parameter { type = "string", name = "client_id", required = true, @in = "formData" },
                    new Parameter { type = "string", name = "client_secret", required = true, @in = "formData" }
                }
            }
        });

        // Refresh Token (same URL, distinct dictionary key)
        swaggerDoc.paths.Add(endpoint + "#RefreshToken", new PathItem
        {
            post = new Operation
            {
                tags = new List<string> { "AccessToken" },
                consumes = new string[] { "application/x-www-form-urlencoded" },
                produces = new string[] { "application/json" },
                parameters = new List<Parameter>
                {
                    new Parameter { type = "string", name = "grant_type", required = true, @in = "formData" },
                    new Parameter { type = "string", name = "client_id", required = true, @in = "formData" },
                    new Parameter { type = "string", name = "client_secret", required = true, @in = "formData" },
                    new Parameter { type = "string", name = "refresh_token", required = true, @in = "formData" }
                }
            }
        });
    }
}
Is there any possibility to describe two API methods pointing to the same endpoint, just using different parameters?
Like shown here: https://api.gettyimages.com/swagger/ui/index#!/OAuth
Finally the hint "adding something meaningless" worked for our use case.
I additionally added a new model class AuthServerResponseModel, where the response of the auth request is mapped in.
/// <summary>
/// Shape of the OAuth token endpoint's JSON response. Registered with the
/// SchemaRegistry so the token operations can reference it via
/// "#/definitions/AuthServerResponseModel". Property names intentionally use
/// snake_case to match the wire format.
/// </summary>
public class AuthServerResponseModel
{
    public string access_token { get; set; }
    public string token_type { get; set; }
    public int expires_in { get; set; }
    public string refresh_token { get; set; }
    public string audience { get; set; }
}
To make this object be known in Swagger, the class has to be added to the SchemaRegistry.
After that I could use the "@ref" property (emitted as "$ref" in the Swagger JSON) in the response schema to declare the response type of my auth request.
/// <summary>
/// Document filter that adds the OAuth token endpoint to the Swagger doc:
/// one operation for the access-token grant, one for the refresh-token grant.
/// Unique "#fragment" suffixes keep the dictionary keys distinct even though
/// both operations target the same URL (clients ignore the fragment).
/// </summary>
public class AuthTokenOperation : IDocumentFilter
{
    public void Apply(SwaggerDocument swaggerDoc, SchemaRegistry schemaRegistry, IApiExplorer apiExplorer)
    {
        // Register the response model so "$ref": "#/definitions/AuthServerResponseModel" resolves.
        schemaRegistry.GetOrRegister(typeof(AuthServerResponseModel));

        // get the Token Endpoint from Config
        string endpoint = "URL-To-The-OAuth-Endpoint";

        // Access Token
        swaggerDoc.paths.Add(endpoint + "#AccessToken", new PathItem
        {
            post = new Operation
            {
                operationId = "AccessToken",
                tags = new List<string> { "Token" },
                // FIX: the standard media type is "application/x-www-form-urlencoded".
                consumes = new string[] { "application/x-www-form-urlencoded" },
                produces = new string[] { "application/json" },
                parameters = new List<Parameter>
                {
                    // "in" is a C# keyword, so Swashbuckle's property is written @in.
                    new Parameter { type = "string", name = "username", required = true, @in = "formData" },
                    new Parameter { type = "string", name = "password", required = true, @in = "formData" },
                    new Parameter { type = "string", name = "grant_type", required = true, @in = "formData" },
                    new Parameter { type = "string", name = "client_id", required = true, @in = "formData" },
                    new Parameter { type = "string", name = "client_secret", required = true, @in = "formData" },
                    new Parameter { type = "string", name = "DeviceId", required = false, @in = "formData" }
                },
                responses = new Dictionary<string, Response>()
                {
                    // "ref" is also a keyword; @ref maps to "$ref" in the emitted JSON.
                    { "200", new Response() { description = "Ok", schema = new Schema() { type = "object", @ref = "#/definitions/AuthServerResponseModel" } } },
                    { "400", new Response() { description = "BadRequest" } },
                    { "404", new Response() { description = "NotFound" } }
                }
            }
        });

        // Refresh Token
        swaggerDoc.paths.Add(endpoint + "#RefreshToken", new PathItem
        {
            post = new Operation
            {
                operationId = "RefreshToken",
                tags = new List<string> { "Token" },
                consumes = new string[] { "application/x-www-form-urlencoded" },
                produces = new string[] { "application/json" },
                parameters = new List<Parameter>
                {
                    new Parameter { type = "string", name = "grant_type", required = true, @in = "formData" },
                    new Parameter { type = "string", name = "client_id", required = true, @in = "formData" },
                    new Parameter { type = "string", name = "client_secret", required = true, @in = "formData" },
                    new Parameter { type = "string", name = "refresh_token", required = true, @in = "formData" }
                },
                responses = new Dictionary<string, Response>()
                {
                    { "200", new Response() { description = "Ok", schema = new Schema() { type = "object", @ref = "#/definitions/AuthServerResponseModel" } } },
                    { "400", new Response() { description = "BadRequest" } },
                    { "404", new Response() { description = "NotFound" } }
                }
            }
        });
    }
}
Automatic Client generation using Swagger Codegen works well now.
The paths is a dictionary in swashbuckle:
public class SwaggerDocument
{
public readonly string swagger = "2.0";
public Info info;
...
public IDictionary<string, PathItem> paths;
...
}
that is why the exception "key has already been added to the collection"
On swagger we follow the Open Api Specification, and that path is a patterned field:
https://github.com/OAI/OpenAPI-Specification/blob/master/versions/2.0.md#patterned-fields
And they clearly state that duplicates is a no no for those patterned fields:
Patterned fields can have multiple occurrences as long as each has a unique name.
https://github.com/OAI/OpenAPI-Specification/blob/master/versions/2.0.md#specification
The best option is to append something meaningless (like hash) to the endpoints to make them different, it could be something like:
swaggerDoc.paths.Add(endpoint + "#Access", new PathItem ...
swaggerDoc.paths.Add(endpoint + "#Refresh", new PathItem ...
How are the guys at gettyimages getting around that?
Here are some interesting findings
Their swagger json does not have those /oauth2/token paths
you can double check directly here:
https://api.gettyimages.com/swagger/docs/3
Here is how their swagger json looks like in the latest version of the swagger-ui:
http://petstore.swagger.io/?defaultModelsExpandDepth=0&docExpansion=none&url=https://api.gettyimages.com/swagger/docs/3
The version of swagger-ui they are using at gettyimages is heavily customized, I think they are injecting additional paths using JS
https://api.gettyimages.com/swagger/ui/ext/GettyImages-Resources-OAuthGrant-js
You can do that too, it will be a lot more work than just appending something to the endpoints
While writing an activity, it returns an exception:
ex {"The remote server returned an error: (400) Bad Request."}
I'm using the following code :-
/// <summary>
/// POSTs <paramref name="postBody"/> to <paramref name="url"/> and returns the
/// response body, or string.Empty on failure (best-effort, as before).
/// </summary>
/// <param name="postBody">Raw, already-encoded request body.</param>
/// <param name="url">Target URL.</param>
/// <param name="authHeader">OAuth bearer token; header omitted when null/empty.</param>
/// <param name="contentType">Request content type; defaults to
/// "application/x-www-form-urlencoded" when null/empty.</param>
public static string PostRequesttoYammer(string postBody, string url, string authHeader = null, string contentType = null)
{
    string results = string.Empty;
    try
    {
        HTTPWebReq = WebRequest.CreateHttp(url);
        HTTPWebReq.Method = "POST";
        if (!string.IsNullOrEmpty(authHeader))
            HTTPWebReq.Headers.Add("Authorization: Bearer " + authHeader);

        byte[] postByte = Encoding.UTF8.GetBytes(postBody);
        HTTPWebReq.ContentType = string.IsNullOrEmpty(contentType)
            ? "application/x-www-form-urlencoded"
            : contentType;
        HTTPWebReq.ContentLength = postByte.Length;

        // using-blocks guarantee the streams are closed even when an
        // exception is thrown mid-request (the original leaked them).
        using (Stream postStream = HTTPWebReq.GetRequestStream())
        {
            postStream.Write(postByte, 0, postByte.Length);
        }

        HTTPWebRes = (HttpWebResponse)HTTPWebReq.GetResponse();
        using (Stream responseStream = HTTPWebRes.GetResponseStream())
        using (StreamReader postReader = new StreamReader(responseStream))
        {
            results = postReader.ReadToEnd();
        }
    }
    catch (WebException wex)
    {
        // FIX: the original swallowed the exception silently, hiding the
        // reason for the "(400) Bad Request". The error response body usually
        // explains the rejection — surface it for diagnosis.
        if (wex.Response != null)
        {
            using (Stream errorStream = wex.Response.GetResponseStream())
            using (StreamReader errorReader = new StreamReader(errorStream))
            {
                Console.Error.WriteLine(errorReader.ReadToEnd());
            }
        }
        Console.Error.WriteLine(wex);
    }
    catch (Exception ex)
    {
        Console.Error.WriteLine(ex);
    }
    return results;
}
I have obtained access token after that i'm trying to write an activity on yammer network .enter image description here
The image shows the content of local variables of function.
Check the below code:
// Create an Open Graph activity in Yammer.
// FIX 1: creating an activity is a write, so the request must be a POST —
// sending the payload with GET is one way to get "(400) Bad Request" back.
// FIX 2: restored the "@" in the actor's e-mail (it had been transcribed as
// "#"), which would also fail server-side validation.
yam.platform.request({
  url: "activity.json",
  method: "POST",
  data: {
    "activity": {
      "actor": { "name": "name", "email": "name@domain.onmicrosoft.com" },
      "action": "create",
      "object": {
        "url": "https://www.news.google.com",
        "image": "url",
        "description": "Testing Description",
        "title": "Open Graph Title"
      },
      "private": "false",
      "message": "testing commit"
    }
  },
  success: function (activity) {
    console.log("Activity request was successful.");
  },
  error: function (activity) {
    console.error("There was an error with the request.");
  }
});
I am writing an add-on for firefox but I seem to have a few issues with the channel. I am trying to write a program that receives a websites certificate information and saves it in a specific format. It does not pick up the information I need.
Here's my code: (Sorry for putting a lot there, but it's all essential.)
// SDK/chrome plumbing for the add-on.
const {Cc,Ci,Cu} = require("chrome");
// Import the devtools console so logging works outside a window context.
let console = (Cu.import("resource://gre/modules/devtools/Console.jsm", {})).console;
console.log("Importing console.\n");
// Cu.import("resource://gre/modules/XPCOMUtils.jsm");
// Kept as a named property so it could later be removed via removeEventListener.
this._loadHandler = function(e) {myExtension.onPageLoad(e); };
var utils = require('sdk/window/utils');
// NOTE(review): only the MOST RECENT browser window gets the listener;
// windows opened later are not hooked.
var gBrowser = utils.getMostRecentBrowserWindow().getBrowser();
gBrowser.addEventListener("load", this._loadHandler, true);
// We register an observer on page load, through page load event listener. Is this round-about?
// Can we register it directly?
// Observes HTTP responses and extracts/logs the server certificate details.
var myExtension = {
  onPageLoad: function(aEvent) {
    var doc = aEvent.originalTarget; // doc is document that triggered "onload" event
    var i = 0;
    console.log("Page has been loaded.\n");
    this.registerObserver("http-on-examine-response");
    this.initialized = true;
  },
  onPageUnload: function(aEvent) {
    console.log("Page has been unloaded.\n");
    this.unregisterObserver("http-on-examine-response");
  },
  registerObserver: function(topic) {
    // FIX: XPCOM contract IDs begin with "@mozilla.org/..." — the original's
    // "#mozilla.org/..." is a transcription error; Cc["#..."] is undefined
    // and .getService would throw.
    var observerService = Cc["@mozilla.org/observer-service;1"]
                            .getService(Ci.nsIObserverService);
    observerService.addObserver(this, topic, false);
    console.log("Observer Registered.\n");
  },
  unregisterObserver: function(topic) {
    var observerService = Cc["@mozilla.org/observer-service;1"]
                            .getService(Ci.nsIObserverService);
    observerService.removeObserver(this, topic);
    console.log("Observer Unregistered.\n");
  },
  //Observation Function; `channel` is the subject of the notification.
  observe: function(channel, topic, data) {
    console.log("Running Observe....\n");
    this.examine_cert(channel);
  },
  examine_cert: function(channel) {
    channel.QueryInterface(Ci.nsIHttpChannel);
    //Assigns the channel of the http resource in the host port
    var host = channel.URI.hostPort;
    //Checks for security info
    var si = channel.securityInfo;
    if (!si){
      console.log("No securityInfo for "+ host +" , returning\n");
      return;
    }
    //Gets Certificates status
    si.QueryInterface(Ci.nsISSLStatusProvider);
    var st = si.SSLStatus;
    if (!st){
      console.log(st + "\n");
      console.log("No SSLStatus for "+ host +" , returning\n");
      return;
    }
    st.QueryInterface(Ci.nsISSLStatus);
    var cert = st.serverCert;
    if (!cert){
      console.log("No serverCert for "+ host +" , returning\n");
      return;
    }
    // by now we must have a non-empty cert, print it and the status
    console.log( "status: " + st + "\n");
    console.log( "cert : " + cert.commonName + "\n");
    //certobj.ciphername = st.cipherName; // apparently, parsed out of cert when SSLStatus is made
    //certobj.keyLength = st.keyLength;
    //certobj.secretKeyLength = st.secretKeyLength;
  },
  // Recursively descend into the object, looking for displayName matching re;
  // also, validate objects as we go.
  findASN1Object: function (struc, re) {
    if (!struc){
      console.log("\nCertificate does not have a valid structure.\n");
      return;
    }
    // Shortcut: object with displayName matching re found, return it immediately
    if (re.test(struc.displayName)) return struc;
    var s = Ci;
    try {
      s = struc.QueryInterface(Ci.nsIASN1Sequence);
    }
    catch (e) {
      console.log("\nCertificate failed nsIASN1Sequence conversion\n");
    }
    if ( !s || ! s.isValidContainer) {
      console.log("\nCertificate does not have a valid container.\n");
      return;
    }
    // check all the objects recursively
    for (var i=0; i<s.ASN1Objects.length; i++) {
      struc = s.ASN1Objects.queryElementAt(i, Ci.nsIASN1Object);
      var res = this.findASN1Object(struc, re);
      if (res) return res;
    }
  },
  //------ Object containment:
  // nsIHttpChannel > securityInfo (nsISSLStatusProvider) > SSLStatus (nsISSLStatus) > serverCert (nsIX509Cert)
  fillCertObj: function(obj, cert) {
    obj.cert = cert;
    console.log("\nCert:" + cert + "\n");
    obj.notBefore = cert.validity.notBefore;
    obj.notAfter = cert.validity.notAfter;
    if (cert.issuer) {
      obj.issuerMd5Fingerprint = cert.issuer.md5Fingerprint;
      obj.issuerSha1Fingerprint = cert.issuer.sha1Fingerprint;
    }
    else {
      //console.log("no issuer: "+ [cert.commonName, cert.issuer, cert.sha1Fingerprint]); \
      console.log("\nThe Certificate doesn't have an Issuer.\n");
    }
    var keys = ["commonName", "organization", "organizationalUnit", "serialNumber","emailAddress",
      "issuerCommonName", "issuerOrganization", "issuerOrganizationUnit","md5Fingerprint", "sha1Fingerprint" ];
    for (var i in keys){
      obj[keys[i]] = cert[keys[i]];
    }
    obj.subjectAltName = [];
    var san = this.findASN1Object(cert.ASN1Structure, /^Certificate Subject Alt Name$/);
    if (san) {
      //this.log("SAN:", [san.displayName, san.displayValue]);
      var m, re = /DNS Name: ((?:\*\.)?[a-z0-9.-]+)/g;
      // *.smth1.smth2.smth3....smthn where smth has only a-z,0-9,or - or
      // smth1.smth2.smth3....smthn where smth has only a-z,0-9,or -
      // push domain names contained in displayValue onto the list subjectAltName
      while (m = re.exec(san.displayValue))
        obj.subjectAltName.push(m[1]);
    }
    console.log("Filled Certificate.\n");
  },
  init_cert: function(){
    var certobj = this.newCertObj();
    //certobj.host = host;
    //certobj.ciphername = st.cipherName;
    //certobj.keyLength = st.keyLength;
    //certobj.secretKeyLength = st.secretKeyLength;
    //this.fillCertObj(certobj.now, cert);
    console.log(certobj);
  },
  newCertObj: function() {
    console.log("Creating Empty Certificate.\n");
    return {
      flags: 0,
      host: "",
      warn: {},
      now: {
        commonName: "",
        organization: "",
        organizationalUnit: "",
        serialNumber: "",
        emailAddress: "",
        notBefore: "",
        notAfter: "",
        issuerCommonName: "",
        issuerOrganization: "",
        issuerOrganizationUnit: "",
        md5Fingerprint: "",
        sha1Fingerprint: "",
        issuerMd5Fingerprint: "",
        issuerSha1Fingerprint: "",
        cert: null,
      },
      old: {
        commonName: "",
        organization: "",
        organizationalUnit: "",
        serialNumber: "",
        emailAddress: "",
        notBefore: "",
        notAfter: "",
        issuerCommonName: "",
        issuerOrganization: "",
        issuerOrganizationUnit: "",
        md5Fingerprint: "",
        sha1Fingerprint: "",
        issuerMd5Fingerprint: "",
        issuerSha1Fingerprint: "",
        cert: null,
      },
    };
  },
}
It's not wise to register and unregister observer on page load because it will be catching http requests from everywhere. So register it on startup of addon and unregister it on shutdown/unload of addon.
And you should check the loadContext to see for what window/tab/frame/ajax/background is making the request. See here for more on loadContext: https://stackoverflow.com/a/25223307/1828637
But anyways this should work the way you have it meaning reg on page load and unreg on page unload:
// SDK/chrome plumbing (answer version): same setup as the question, plus
// Services.jsm so the observer service can be reached via Services.obs.
const {
  Cc, Ci, Cu
} = require("chrome");
let console = (Cu.import("resource://gre/modules/devtools/Console.jsm", {})).console;
console.log("Importing console.\n");
// Cu.import("resource://gre/modules/XPCOMUtils.jsm");
Cu.import("resource://gre/modules/Services.jsm");
this._loadHandler = function(e) {
  myExtension.onPageLoad(e);
};
var utils = require('sdk/window/utils');
// NOTE(review): only the most recent browser window gets the listener;
// windows opened later are not hooked.
var gBrowser = utils.getMostRecentBrowserWindow().getBrowser();
gBrowser.addEventListener("load", this._loadHandler, true);
// We register an observer on page load, through page load event listener. Is this round-about?
// Can we register it directly?
// Answer version of the extension object: the observer lives in an
// `observers` map with symmetric reg()/unreg() helpers, and Services.obs is
// used instead of fetching the observer service by contract ID, so the
// registration does not depend on `this`.
var myExtension = {
  observers: {
    'http-on-examine-response': {
      // aSubject is the channel for the response being examined.
      observe: function(aSubject, aTopic, aData) {
        console.log("Running Observe....\n");
        myExtension.examine_cert(aSubject);
      },
      reg: function() {
        Services.obs.addObserver(myExtension.observers['http-on-examine-response'], 'http-on-examine-response', false);
      },
      unreg: function() {
        Services.obs.removeObserver(myExtension.observers['http-on-examine-response'], 'http-on-examine-response');
      }
    }
  },
  onPageLoad: function(aEvent) {
    var doc = aEvent.originalTarget; // doc is document that triggered "onload" event
    var i = 0;
    console.log("Page has been loaded.\n");
    myExtension.observers['http-on-examine-response'].reg();
    this.initialized = true;
  },
  onPageUnload: function(aEvent) {
    console.log("Page has been unloaded.\n");
    myExtension.observers['http-on-examine-response'].unreg();
  },
  // Pull the server certificate off an HTTP channel and log its details.
  examine_cert: function(channel) {
    channel.QueryInterface(Ci.nsIHttpChannel);
    //Assigns the channel of the http resource in the host port
    var host = channel.URI.hostPort;
    //Checks for security info
    var si = channel.securityInfo;
    if (!si) {
      console.log("No securityInfo for " + host + " , returning\n");
      return;
    }
    //Gets Certificates status
    si.QueryInterface(Ci.nsISSLStatusProvider);
    var st = si.SSLStatus;
    if (!st) {
      console.log(st + "\n");
      console.log("No SSLStatus for " + host + " , returning\n");
      return;
    }
    st.QueryInterface(Ci.nsISSLStatus);
    var cert = st.serverCert;
    if (!cert) {
      console.log("No serverCert for " + host + " , returning\n");
      return;
    }
    // by now we must have a non-empty cert, print it and the status
    console.log("status: " + st + "\n");
    console.log("cert : " + cert.commonName + "\n");
    //certobj.ciphername = st.cipherName; // apparently, parsed out of cert when SSLStatus is made
    //certobj.keyLength = st.keyLength;
    //certobj.secretKeyLength = st.secretKeyLength;
  },
  // Recursively descend into the object, looking for displayName matching re;
  // also, validate objects as we go.
  findASN1Object: function(struc, re) {
    if (!struc) {
      console.log("\nCertificate does not have a valid structure.\n");
      return;
    }
    // Shortcut: object with displayName matching re found, return it immediately
    if (re.test(struc.displayName)) return struc;
    var s = Ci;
    try {
      s = struc.QueryInterface(Ci.nsIASN1Sequence);
    } catch (e) {
      console.log("\nCertificate failed nsIASN1Sequence conversion\n");
    }
    if (!s || !s.isValidContainer) {
      console.log("\nCertificate does not have a valid container.\n");
      return;
    }
    // check all the objects recursively
    for (var i = 0; i < s.ASN1Objects.length; i++) {
      struc = s.ASN1Objects.queryElementAt(i, Ci.nsIASN1Object);
      var res = this.findASN1Object(struc, re);
      if (res) return res;
    }
  },
  //------ Object containment:
  // nsIHttpChannel > securityInfo (nsISSLStatusProvider) > SSLStatus (nsISSLStatus) > serverCert (nsIX509Cert)
  fillCertObj: function(obj, cert) {
    obj.cert = cert;
    console.log("\nCert:" + cert + "\n");
    obj.notBefore = cert.validity.notBefore;
    obj.notAfter = cert.validity.notAfter;
    if (cert.issuer) {
      obj.issuerMd5Fingerprint = cert.issuer.md5Fingerprint;
      obj.issuerSha1Fingerprint = cert.issuer.sha1Fingerprint;
    } else {
      //console.log("no issuer: "+ [cert.commonName, cert.issuer, cert.sha1Fingerprint]); \
      console.log("\nThe Certificate doesn't have an Issuer.\n");
    }
    // Copy the flat certificate fields listed in `keys` onto obj.
    var keys = ["commonName", "organization", "organizationalUnit", "serialNumber", "emailAddress",
      "issuerCommonName", "issuerOrganization", "issuerOrganizationUnit", "md5Fingerprint", "sha1Fingerprint"
    ];
    for (var i in keys) {
      obj[keys[i]] = cert[keys[i]];
    }
    obj.subjectAltName = [];
    var san = this.findASN1Object(cert.ASN1Structure, /^Certificate Subject Alt Name$/);
    if (san) {
      //this.log("SAN:", [san.displayName, san.displayValue]);
      var m, re = /DNS Name: ((?:\*\.)?[a-z0-9.-]+)/g;
      // *.smth1.smth2.smth3....smthn where smth has only a-z,0-9,or - or
      // smth1.smth2.smth3....smthn where smth has only a-z,0-9,or -
      // push domain names contained in displayValue onto the list subjectAltName
      while (m = re.exec(san.displayValue))
        obj.subjectAltName.push(m[1]);
    }
    console.log("Filled Certificate.\n");
  },
  init_cert: function() {
    var certobj = this.newCertObj();
    //certobj.host = host;
    //certobj.ciphername = st.cipherName;
    //certobj.keyLength = st.keyLength;
    //certobj.secretKeyLength = st.secretKeyLength;
    //this.fillCertObj(certobj.now, cert);
    console.log(certobj);
  },
  // Blank template for a certificate record: `now` holds the current cert's
  // fields, `old` a previously-seen cert's fields for comparison.
  newCertObj: function() {
    console.log("Creating Empty Certificate.\n");
    return {
      flags: 0,
      host: "",
      warn: {},
      now: {
        commonName: "",
        organization: "",
        organizationalUnit: "",
        serialNumber: "",
        emailAddress: "",
        notBefore: "",
        notAfter: "",
        issuerCommonName: "",
        issuerOrganization: "",
        issuerOrganizationUnit: "",
        md5Fingerprint: "",
        sha1Fingerprint: "",
        issuerMd5Fingerprint: "",
        issuerSha1Fingerprint: "",
        cert: null,
      },
      old: {
        commonName: "",
        organization: "",
        organizationalUnit: "",
        serialNumber: "",
        emailAddress: "",
        notBefore: "",
        notAfter: "",
        issuerCommonName: "",
        issuerOrganization: "",
        issuerOrganizationUnit: "",
        md5Fingerprint: "",
        sha1Fingerprint: "",
        issuerMd5Fingerprint: "",
        issuerSha1Fingerprint: "",
        cert: null,
      },
    };
  },
}
Lastly, why are you wrapping everything in the pseudo-class myExtension? When writing addon-sdk or bootstrap add-ons you don't have to do that anymore. Dropping it makes things easier, since you don't have to track what scope you are in.