I am looking for a simple solution to catch any window redirections (301, 302, etc.) and retrieve the corresponding URL from an nsIWebProgress.
Current solution
Right now, I am using the nsIWebProgress.NOTIFY_STATE_DOCUMENT notification, processing any state changes that carry the STATE_REDIRECTING flag and iterating the response headers of the corresponding request's nsIHttpChannel interface, as shown below.
Question
However, I am not sure whether I am using the best notification mask (NOTIFY_STATE_DOCUMENT) and whether this is really the fastest possible solution.
Furthermore, I am wondering whether Firefox has already validated the response's Location header at this stage, since I intend to parse the Location header with the sdk/url.URL constructor to be sure that the given value is valid and not an XSS scam.
Client request:
GET /garage HTTP/1.1
Host: www.batmans.cave
Server response:
HTTP/1.1 301 Moved Permanently
Location: https://batmans.cave/garage/batmobile
Working example (simplified):
const { Ci } = require("chrome");
const LOCATION_HEADER = "location";
...
interfaces: ["nsIWebProgressListener", "nsISupportsWeakReference"],
add(browser) {
try {
browser.addProgressListener(this, Ci.nsIWebProgress.NOTIFY_STATE_DOCUMENT);
} catch (error) {}
},
...
/**
* Called when the state changes for the window being watched.
* @see https://developer.mozilla.org/en-US/docs/Mozilla/Tech/XPCOM/Reference/Interface/nsIWebProgressListener#onStateChange%28%29
* @param {nsIWebProgress} webProgress
* @param {nsIRequest} request
* @param {Number} stateFlags
* @param {nsresult} status
*/
onStateChange(webProgress, request, stateFlags, status) {
if (stateFlags & Ci.nsIWebProgressListener.STATE_REDIRECTING) {
const window = webProgress.DOMWindow;
if (request instanceof Ci.nsIHttpChannel) {
const httpChannel = request.QueryInterface(Ci.nsIHttpChannel);
let location;
httpChannel.visitResponseHeaders(function (header, value) {
if (header.toLowerCase() == LOCATION_HEADER) {
location = value;
}
});
if (location) {
try {
const url = URL(location).toString();
console.log("redirect to", url); // OK! "https://batmans.cave/garage/batmobile"
}
catch (error) {}
}
}
}
}
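For reference, this is roughly the kind of parsing I have in mind, as a minimal sketch; resolveRedirectTarget is just an illustrative helper name, and it assumes the Add-on SDK's sdk/url module, whose URL(source, base) constructor accepts a base URL and throws on invalid input:
const { URL } = require("sdk/url");

// Resolve a possibly relative Location header against the URI of the channel
// that is being redirected; returns null if the header cannot be parsed.
function resolveRedirectTarget(httpChannel, location) {
  try {
    return URL(location, httpChannel.URI.spec).toString();
  } catch (error) {
    return null;
  }
}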
Related
I want to factorize my code in Cloud Functions in order to improve readability and maintenance. The code below works, but after waiting for all Promises to complete with Promise.all(), the code times out.
The things I don't understand are:
It works and completes without a timeout when toiletJsonObject["fields"]["adresse"] = formatAddress(toiletJsonObject["fields"]["adresse"]) is commented out.
Since it works without the line above, the timeout should be due to the formatAddress() function. However, this function is not an async one and just returns a string synchronously. Maybe that's what I misunderstand.
So my questions are:
How can I correct my code to avoid the timeout?
What's the best way to factorize code with custom functions that are only accessible inside the file and therefore do not need to be exported?
The entire code :
import * as functions from "firebase-functions";
import * as admin from "firebase-admin";
import fetch from "node-fetch";
admin.initializeApp();
const db = admin.firestore();
export const tempoCF = functions.firestore.document("/tempo/{docId}").onCreate(async () => {
console.log("onCreate")
const settings = { method: "Get" }
const metaUrl = "https://opendata.paris.fr/api/datasets/1.0/sanisettesparis/"
const toiletUpdateDateRef = db.collection('toilets').doc("updateDate")
try {
// Get meta data to check last update date
const metaResponse = await fetch(metaUrl, settings)
const metaJson = await metaResponse.json()
const metaUpdateDate = metaJson["metas"]["modified"]
const lastUpdatedDateDoc = await toiletUpdateDateRef.get()
if (!lastUpdatedDateDoc.exists) {
console.log("No existing date document, create one and add last update date : " + metaUpdateDate)
await fetchDataFromURL()
return toiletUpdateDateRef.set({ "lastUpdateDate": metaUpdateDate })
} else {
const lastUpdateDate = lastUpdatedDateDoc.data()["lastUpdateDate"]
// If the date from the metadata is newer than the saved date: get data and update
if (new Date(lastUpdateDate) < new Date(metaUpdateDate)) {
console.log("New data available, update database")
await fetchDataFromURL()
return toiletUpdateDateRef.set({ "lastUpdateDate": metaUpdateDate })
}
else {
console.log("No new data available, do nothing")
return null
}
}
}
catch (error) {
console.log(error);
return null;
}
}
);
async function fetchDataFromURL() {
const dataUrl = "https://opendata.paris.fr/api/records/1.0/search/?dataset=sanisettesparis&q=&rows=-1"
const settings = { method: "Get" }
try {
const response = await fetch(dataUrl, settings)
const json = await response.json()
const promises = []
console.log("fetch data and add toilets to collection")
json["records"].forEach(toiletJsonObject => {
delete toiletJsonObject["fields"]["geo_shape"]
toiletJsonObject["fields"]["adresse"] = formatAddress(toiletJsonObject["fields"]["adresse"])
console.log("after updating adresse field: " + toiletJsonObject["fields"].toString())
const p = db.collection("toilets").doc(toiletJsonObject["recordid"]).set(toiletJsonObject["fields"])
promises.push(p)
})
console.log("finished creating promises. Wait for all to complete")
return Promise.all(promises);
}
catch (error) {
console.log(error);
return null;
}
}
const linkWords = ["de", "des", "du", "le"]
const linkLetters = ["l", "d"]
const firstWordsAddress = ["face", "opposé", "au"]
const alwaysLowerCaseWords = ["ville", "rue"]
function formatAddress(address) {
let processedAddress = ""
if (address != null) {
//if (address.length <= 1) processedAddress = address.toUpperCase();
// Split string into list of words
var wordsList = address.split(' ')
.filter((word) => {
// If there is a word in front of the street number, don't use it
if (firstWordsAddress.includes(word.toLowerCase())) return false
// Else use it
return true
})
var capitalizedList = wordsList.map((word) => {
const lowerCaseWord = word.toLowerCase() //TOSTRING ?
// If current word is a link word, don't capitalize
if (linkWords.includes(lowerCaseWord))
return lowerCaseWord
// If current word is a link letter, add ' char
else if (linkLetters.includes(lowerCaseWord))
return lowerCaseWord + '\''
// If current word should always be in lower case, don't capitalize
else if (alwaysLowerCaseWords.includes(lowerCaseWord))
return word.toLowerCase() //TOSTRING
// Else, capitalize the word
return word[0].toUpperCase() + word.substr(1).toLowerCase()
});
// Always capitalize first word of the address
capitalizedList[0] = capitalizedList[0][0].toUpperCase() + capitalizedList[0].substr(1).toLowerCase()
processedAddress = capitalizedList.join(' ')
processedAddress = processedAddress.replace("\' ", "\'")
processedAddress = processedAddress.trim()
}
return processedAddress
}
Regarding the formatAddress() helper function you defined, there doesn't appear to be an issue with it in its current form. It can happily run through the entire list of 644 addresses ~210 times per second.
Any timeouts are instead likely to be caused by performing so many database writes in quick succession. When running fetchDataFromURL(), you "spam" the Firestore server with a request for each toilet object you are uploading.
The best-practice approach would be to compile a Batched Write and then commit the result once you've finished processing the data.
As stated in that documentation:
A batched write can contain up to 500 operations. Each operation in the batch counts separately towards your Cloud Firestore usage. Within a write operation, field transforms like serverTimestamp, arrayUnion, and increment each count as an additional operation.
Note: The current list of field transforms includes serverTimestamp, arrayUnion, arrayRemove, and increment. Reference: FieldValue
Creating/deleting/writing a document to Firestore is considered "one operation". Because a field transform requires reading the document, then writing data to that document, it is counted as "two operations".
Because a single batched write is limited to 500 operations, you should split your data up into smaller batched writes so that each batch stays under this 500-operation limit (for example, the 644 toilet documents here need at least two plain batched writes, or three if every write also carried a field transform). The easiest way to achieve this would be to use this MultiBatch class (included below) that I've updated from one of my old answers.
If the data you are writing to a Cloud Firestore document is just basic data, use one of multibatch.create(), multibatch.delete(), multibatch.set(), or multibatch.update(). Each time one of these is called, the internal operations counter is increased by 1.
If the data you are writing to Cloud Firestore contains any FieldValue
transforms, use one of multibatch.transformCreate(), multibatch.transformSet(), or multibatch.transformUpdate(). Each time one of these is called, the internal operations counter is increased by 2.
Once adding another operation would push the internal counter past 500, it automatically starts a new batched write and adds it to its internal list.
When you've queued up all your data ready to send off to Firestore, call multibatch.commit().
console.log("Fetching data from third-party server...")
const response = await fetch(dataUrl, settings)
const json = await response.json()
console.log("Data obtained. Parsing as Firestore documents...")
const batch = new MultiBatch(db)
json["records"].forEach(toiletJsonObject => {
delete toiletJsonObject["fields"]["geo_shape"]
toiletJsonObject["fields"]["adresse"] = formatAddress(toiletJsonObject["fields"]["adresse"])
console.log("after updating adresse field: " + toiletJsonObject["fields"].toString())
batch.set(db.collection("toilets").doc(toiletJsonObject["recordid"]), toiletJsonObject["fields"])
})
console.log("Finished parsing. Committing data to Firestore...")
const results = await batch.commit() // see notes about MultiBatch#commit()
console.log("Finished data upload!")
return results;
import { firestore } from "firebase-admin";
/**
* Helper class to compile an expanding `firestore.WriteBatch`.
*
* Using an internal operations counter, this class will automatically start a
* new `firestore.WriteBatch` instance when it detects it has hit the operations
* limit of 500. Once prepared, you can commit the batches together.
*
* Note: `FieldValue` transform operations such as `serverTimestamp`,
* `arrayUnion`, `arrayRemove`, `increment` are counted as two operations. If
* your written data makes use of one of these, you should use the appropriate
* `transformCreate`, `transformSet` or `transformUpdate` method so that the
* internal counter is correctly increased by 2 (the normal versions only
* increase the counter by 1).
*
* If not sure, just use `delete`, `transformCreate`, `transformSet`, or
* `transformUpdate` functions for every operation as this will make sure you
* don't exceed the limit.
*
* @author Samuel Jones [MIT License] (@samthecodingman)
* @see https://stackoverflow.com/a/66692467/3068190
* @see https://firebase.google.com/docs/firestore/manage-data/transactions
* @see https://firebase.google.com/docs/reference/js/firebase.firestore.FieldValue
*/
export class MultiBatch {
constructor(dbRef) {
this.dbRef = dbRef;
this.committed = false;
this.currentBatch = this.dbRef.batch();
this.currentBatchOpCount = 0;
this.batches = [this.currentBatch];
}
_getCurrentBatch(count) {
if (this.committed) throw new Error("MultiBatch already committed.");
if (this.currentBatchOpCount + count > 500) {
// operation limit exceeded, start a new batch
this.currentBatch = this.dbRef.batch();
this.currentBatchOpCount = 0;
this.batches.push(this.currentBatch);
}
this.currentBatchOpCount += count;
return this.currentBatch;
}
/** Creates the document, fails if it exists. */
create(ref, data) {
this._getCurrentBatch(1).create(ref, data);
return this;
}
/**
* Creates the document, fails if it exists.
*
* Used for commands that contain serverTimestamp, arrayUnion, etc
*/
transformCreate(ref, data) {
this._getCurrentBatch(2).create(ref, data);
return this;
}
/** Writes the document, creating/overwriting/etc as applicable. */
set(ref, data, options = undefined) {
this._getCurrentBatch(1).set(ref, data, options);
return this;
}
/**
* Writes the document, creating/overwriting/etc as applicable.
*
* Used for commands that contain serverTimestamp, arrayUnion, etc
*/
transformSet(ref, data, options = undefined) {
this._getCurrentBatch(2).set(ref, data, options);
return this;
}
/** Merges data into the document, failing if the document doesn't exist. */
update(ref, data, ...fieldsOrPrecondition) {
this._getCurrentBatch(1).update(ref, data, ...fieldsOrPrecondition);
return this;
}
/**
* Merges data into the document, failing if the document doesn't exist.
*
* Used for commands that contain serverTimestamp, arrayUnion, etc
*/
transformUpdate(ref, data, ...fieldsOrPrecondition) {
this._getCurrentBatch(2).update(ref, data, ...fieldsOrPrecondition);
return this;
}
/** Deletes the document. */
delete(ref) {
this._getCurrentBatch(1).delete(ref);
return this;
}
/**
*
* Commits all of the batches to Firestore.
*
* Note: Unlike normal batch operations, this may cause one or more atomic
* writes. One batch may succeed where others fail. By default, if any batch
* fails, it will fail the whole promise. This can be suppressed by passing in
* a truthy value as the first argument and checking the results returned by
* this method.
*
* @param {boolean} [suppressErrors=false] Whether to suppress errors on a
* per-batch basis.
* @return {Promise<firestore.WriteResult[][]>} a promise resolving to an array
* containing, for each batch, its array of `WriteResult` objects (or an
* error-batch pair for batches that failed, if `suppressErrors=true`).
*/
commit(suppressErrors = false) {
this.committed = true;
const mapCallback = suppressErrors
? (batch) => batch.commit().catch((error) => ({ error, batch }))
: (batch) => batch.commit();
return Promise.all(this.batches.map(mapCallback));
}
}
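For completeness, here is a minimal sketch of the transform variants in use inside fetchDataFromURL(). The lastSynced field and its serverTimestamp value are purely illustrative additions (the toilet data itself doesn't need a transform); they are only there to show why transformSet() is used instead of set():
const batch = new MultiBatch(db)
json["records"].forEach(toiletJsonObject => {
  const fields = toiletJsonObject["fields"]
  // serverTimestamp is a FieldValue transform, so this write counts as 2 operations
  fields["lastSynced"] = admin.firestore.FieldValue.serverTimestamp()
  batch.transformSet(db.collection("toilets").doc(toiletJsonObject["recordid"]), fields)
})
const results = await batch.commit()
Because each of these writes counts as two operations, the 644 documents would be spread over three internal batches instead of two.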
How to use SignalR with Angular 2?
How to manually run change detection when receiving data from SignalR?
I recently wrote an article that demonstrates one way to integrate Angular 2 and SignalR using a "channel/event" model:
https://blog.sstorie.com/integrating-angular-2-and-signalr-part-2-of-2/
I don't think just linking to another site is considered appropriate, so here's the core of the Angular 2 service that exposes SignalR:
import {Injectable, Inject} from "angular2/core";
import Rx from "rxjs/Rx";
/**
* When SignalR runs it will add functions to the global $ variable
* that you use to create connections to the hub. However, in this
* class we won't want to depend on any global variables, so this
* class provides an abstraction away from using $ directly in here.
*/
export class SignalrWindow extends Window {
$: any;
}
export enum ConnectionState {
Connecting = 1,
Connected = 2,
Reconnecting = 3,
Disconnected = 4
}
export class ChannelConfig {
url: string;
hubName: string;
channel: string;
}
export class ChannelEvent {
Name: string;
ChannelName: string;
Timestamp: Date;
Data: any;
Json: string;
constructor() {
this.Timestamp = new Date();
}
}
class ChannelSubject {
channel: string;
subject: Rx.Subject<ChannelEvent>;
}
/**
* ChannelService is a wrapper around the functionality that SignalR
* provides to expose the ideas of channels and events. With this service
* you can subscribe to specific channels (or groups in signalr speak) and
* use observables to react to specific events sent out on those channels.
*/
@Injectable()
export class ChannelService {
/**
* starting$ is an observable available to know if the signalr
* connection is ready or not. On a successful connection this
* stream will emit a value.
*/
starting$: Rx.Observable<any>;
/**
* connectionState$ provides the current state of the underlying
* connection as an observable stream.
*/
connectionState$: Rx.Observable<ConnectionState>;
/**
* error$ provides a stream of any error messages that occur on the
* SignalR connection
*/
error$: Rx.Observable<string>;
// These are used to feed the public observables
//
private connectionStateSubject = new Rx.Subject<ConnectionState>();
private startingSubject = new Rx.Subject<any>();
private errorSubject = new Rx.Subject<any>();
// These are used to track the internal SignalR state
//
private hubConnection: any;
private hubProxy: any;
// An internal array to track what channel subscriptions exist
//
private subjects = new Array<ChannelSubject>();
constructor(
@Inject(SignalrWindow) private window: SignalrWindow,
@Inject("channel.config") private channelConfig: ChannelConfig
) {
if (this.window.$ === undefined || this.window.$.hubConnection === undefined) {
throw new Error("The variable '$' or the .hubConnection() function are not defined...please check the SignalR scripts have been loaded properly");
}
// Set up our observables
//
this.connectionState$ = this.connectionStateSubject.asObservable();
this.error$ = this.errorSubject.asObservable();
this.starting$ = this.startingSubject.asObservable();
this.hubConnection = this.window.$.hubConnection();
this.hubConnection.url = channelConfig.url;
this.hubProxy = this.hubConnection.createHubProxy(channelConfig.hubName);
// Define handlers for the connection state events
//
this.hubConnection.stateChanged((state: any) => {
let newState = ConnectionState.Connecting;
switch (state.newState) {
case this.window.$.signalR.connectionState.connecting:
newState = ConnectionState.Connecting;
break;
case this.window.$.signalR.connectionState.connected:
newState = ConnectionState.Connected;
break;
case this.window.$.signalR.connectionState.reconnecting:
newState = ConnectionState.Reconnecting;
break;
case this.window.$.signalR.connectionState.disconnected:
newState = ConnectionState.Disconnected;
break;
}
// Push the new state on our subject
//
this.connectionStateSubject.next(newState);
});
// Define handlers for any errors
//
this.hubConnection.error((error: any) => {
// Push the error on our subject
//
this.errorSubject.next(error);
});
this.hubProxy.on("onEvent", (channel: string, ev: ChannelEvent) => {
//console.log(`onEvent - ${channel} channel`, ev);
// This method acts like a broker for incoming messages. We
// check the internal array of subjects to see if one exists
// for the channel this came in on, and then emit the event
// on it. Otherwise we ignore the message.
//
let channelSub = this.subjects.find((x: ChannelSubject) => {
return x.channel === channel;
}) as ChannelSubject;
// If we found a subject then emit the event on it
//
if (channelSub !== undefined) {
return channelSub.subject.next(ev);
}
});
}
/**
* Start the SignalR connection. The starting$ stream will emit an
* event if the connection is established, otherwise it will emit an
* error.
*/
start(): void {
// Now we only want the connection started once, so we have a special
// starting$ observable that clients can subscribe to, to know if
// the startup sequence is done.
//
// If we just mapped the start() promise to an observable, then any time
// a client subscribed to it the start sequence would be triggered
// again since it's a cold observable.
//
this.hubConnection.start()
.done(() => {
this.startingSubject.next();
})
.fail((error: any) => {
this.startingSubject.error(error);
});
}
/**
* Get an observable that will contain the data associated with a specific
* channel.
*/
sub(channel: string): Rx.Observable<ChannelEvent> {
// Try to find an observable that we already created for the requested
// channel
//
let channelSub = this.subjects.find((x: ChannelSubject) => {
return x.channel === channel;
}) as ChannelSubject;
// If we already have one for this event, then just return it
//
if (channelSub !== undefined) {
console.log(`Found existing observable for ${channel} channel`)
return channelSub.subject.asObservable();
}
//
// If we're here then we don't already have the observable to provide the
// caller, so we need to call the server method to join the channel
// and then create an observable that the caller can use to receive
// messages.
//
// Now we just create our internal object so we can track this subject
// in case someone else wants it too
//
channelSub = new ChannelSubject();
channelSub.channel = channel;
channelSub.subject = new Rx.Subject<ChannelEvent>();
this.subjects.push(channelSub);
// Now SignalR is asynchronous, so we need to ensure the connection is
// established before we call any server methods. So we'll subscribe to
// the starting$ stream since that won't emit a value until the connection
// is ready
//
this.starting$.subscribe(() => {
this.hubProxy.invoke("Subscribe", channel)
.done(() => {
console.log(`Successfully subscribed to ${channel} channel`);
})
.fail((error: any) => {
channelSub.subject.error(error);
});
},
(error: any) => {
channelSub.subject.error(error);
});
return channelSub.subject.asObservable();
}
// Not quite sure how to handle this (if at all) since there could be
// more than 1 caller subscribed to an observable we created
//
// unsubscribe(channel: string): Rx.Observable<any> {
// this.observables = this.observables.filter((x: ChannelObservable) => {
// return x.channel === channel;
// });
// }
/** publish provides a way for callers to emit events on any channel. In a
* production app the server would ensure that only authorized clients can
* actually emit the message, but here we're not concerned about that.
*/
publish(ev: ChannelEvent): void {
this.hubProxy.invoke("Publish", ev);
}
}
Then a component could use this service by subscribing (not in the rxjs sense...) to a specific channel, and reacting to specific events emitted:
import {Component, OnInit, Input} from "angular2/core";
import {Http, Response} from "angular2/http";
import Rx from "rxjs/Rx";
import {ChannelService, ChannelEvent} from "./services/channel.service";
class StatusEvent {
State: string;
PercentComplete: number;
}
@Component({
selector: 'task',
template: `
<div>
<h4>Task component bound to '{{eventName}}'</h4>
</div>
<div class="commands">
<textarea
class="console"
cols="50"
rows="15"
disabled
[value]="messages"></textarea>
<div class="commands__input">
<button (click)="callApi()">Call API</button>
</div>
</div>
`
})
export class TaskComponent implements OnInit {
@Input() eventName: string;
@Input() apiUrl: string;
messages = "";
private channel = "tasks";
constructor(
private http: Http,
private channelService: ChannelService
) {
}
ngOnInit() {
// Get an observable for events emitted on this channel
//
this.channelService.sub(this.channel).subscribe(
(x: ChannelEvent) => {
switch (x.Name) {
case this.eventName: { this.appendStatusUpdate(x); }
}
},
(error: any) => {
console.warn("Attempt to join channel failed!", error);
}
)
}
private appendStatusUpdate(ev: ChannelEvent): void {
// Just prepend this to the messages string shown in the textarea
//
let date = new Date();
switch (ev.Data.State) {
case "starting": {
this.messages = `${date.toLocaleTimeString()} : starting\n` + this.messages;
break;
}
case "complete": {
this.messages = `${date.toLocaleTimeString()} : complete\n` + this.messages;
break;
}
default: {
this.messages = `${date.toLocaleTimeString()} : ${ev.Data.State} : ${ev.Data.PercentComplete} % complete\n` + this.messages;
}
}
}
callApi() {
this.http.get(this.apiUrl)
.map((res: Response) => res.json())
.subscribe((message: string) => { console.log(message); });
}
}
I tried to map the SignalR concepts into observables, but I'm still learning how to effectively use RxJS. In any case I hope that helps show how this might work in the context of an Angular 2 app.
You can also try using ng2-signalr.
npm install ng2-signalr --save
takes care of ng2 change detection using zones
allows your server events to be listened to using rxjs.
Here is the link to the source.
You didn't specify which syntax you're using to develop your Angular 2 app.
I will assume you're using TypeScript.
One approach is to use Definitely Typed files.
1 - You'll need to download a Definitely Typed JQuery:
https://github.com/DefinitelyTyped/DefinitelyTyped/blob/master/jquery/jquery.d.ts
2 - After this, download a Definitely Typed SignalR:
https://github.com/DefinitelyTyped/DefinitelyTyped/blob/master/signalr/signalr.d.ts
3 - Add the jQuery reference in your component:
/// <reference path="../jquery.d.ts" />
4 - Now you can call SignalR methods with IntelliSense, but you will need to use the late binding approach:
var connection = $.hubConnection();
var proxy = connection.createHubProxy("myHub"); // the hub name here is illustrative; use your own hub's name
proxy.on("newOrder", (order) => console.log(order));
connection.start();
As far as examples go, there probably aren't any yet. Welcome to the beginning of a framework. But do keep checking over time because as popularity and adoption increases, there will sure to be many examples.
As far as running change detection, that's a very vague question as angular2's change detection is now very different, and much improved.
My approach is to just let angular2 handle it, and not trigger a manual change detection at all as most of the time Angular2 picks up on the change and re-renders the view.
If that does not work, then the next step is to trigger .run() on the NgZone
example:
import {NgZone, Component} from 'angular2/core';
@Component({...})
export class MyComponent {
myProperty: string = 'Hello';
constructor(private myService: MyService, private ngZone: NgZone) {}
doSomething(){
this.myService.doSomething().then(x => {
this.ngZone.run(() => {
this.myProperty = x;
});
});
}
}
Again though, I have found that even working with asynchronous code, angular2 usually picks up on the change without using ngZone at all.
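If you would rather not inject NgZone, another option is to ask Angular to re-check just the affected component after your SignalR callback runs. Here is the same example rewritten as a minimal sketch using ChangeDetectorRef (MyService is still just a placeholder for whatever service wraps your SignalR callback):
import {ChangeDetectorRef, Component} from 'angular2/core';

@Component({...})
export class MyComponent {
  myProperty: string = 'Hello';

  constructor(private myService: MyService, private cdRef: ChangeDetectorRef) {}

  doSomething() {
    this.myService.doSomething().then(x => {
      this.myProperty = x;
      // Re-run change detection for this component's subtree only
      this.cdRef.detectChanges();
    });
  }
}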
I'm trying to port an existing AJAX app to Flex, and having trouble with the encoding of parameters sent to the backend service.
When trying to perform the action of deleting a contact, the existing app performs a POST, sending the following (captured with Firebug):
contactRequest.contacts[0].contactId=2c33ddc6012a100096326b40a501ec72
So, I create the following code:
var service:HTTPService;
function initalizeService():void
{
service = new HTTPService();
service.url = "http://someservice";
service.method = 'POST';
}
public function sendReq():void
{
var params:Object = new Object();
params['contactRequest.contacts[0].contactId'] = '2c33ddc6012a100097876b40a501ec72';
service.send(params);
}
In Firebug, I see this sent out as follows:
Content-type: application/x-www-form-urlencoded
Content-length: 77
contactRequest%2Econtacts%5B0%5D%2EcontactId=2c33ddc6012a100097876b40a501ec72
Flex is URL encoding the params before sending them, and we're getting an error returned from the server.
How do I disable this encoding, and get the params sent as-is, without the URL encoding?
I feel like the contentType property should be the key - but neither of the defined values work.
Also, I've considered writing a SerializationFilter, but this seems like overkill - is there a simpler way?
Writing a SerializationFilter seemed to do the trick:
public class MyFilter extends SerializationFilter
{
public function MyFilter()
{
super();
}
override public function serializeBody(operation:AbstractOperation, obj:Object):Object
{
var s:String = "";
var classinfo:Object = ObjectUtil.getClassInfo(obj);
for each (var p:* in classinfo.properties)
{
var val:* = obj[p];
if (val != null)
{
if (s.length > 0)
s += "&";
s += StringUtil.substitute("{0}={1}",p,val);
}
}
return s;
}
}
I'd love to know any alternative solutions that don't involve doing this though!
How do I use crossdomain with ftp?
I am trying to do a "hello world" level test of FTP in Flex, but for three days now I have not been able to get Flex to accept my crossdomain policy, even for testing purposes.
Here is my code; the exact error text follows.
<?xml version="1.0" encoding="utf-8"?>
<mx:Application xmlns:mx="http://www.adobe.com/2006/mxml" initialize="onInitialize()" layout="vertical">
<mx:Script>
<![CDATA[
import mx.utils.*;
import mx.controls.Alert;
private var fileRef:FileReference;
private var fileSize:uint;
private var fileContents:ByteArray;
//you need to initiate two sockets: one for sending
//commands and a second for sending data to the FTP server
//socket for sending commands to FTP
private var s:Socket
//response from FTP
private var ftpResponce:String;
//socket for sending Data to FTP
private var dataChannelSocket:Socket;
//response from FTP when sending Data to FTP
private var dataResponce:String;
//will hold the IP address of new socket created by FTP
private var dataChannelIP:String;
//will hold the Port number created by FTP
private var dataChannelPort:int;
private var user:String="I have the right user"; //FTP username
private var pass:String="the pw is correct"; //FTP Password
private function receiveReply(e:ProgressEvent):void {
ftpResponce=s.readUTFBytes(s.bytesAvailable)
var serverResponse:Number=Number(ftpResponce.substr(0, 3));
if (ftpResponce.indexOf('227') > -1) {
//get the ip from the string response
var temp:Object=ftpResponce.substring(ftpResponce.indexOf("(") + 1
, ftpResponce.indexOf(")"));
var dataChannelSocket_temp:Object=temp.split(",");
dataChannelIP=dataChannelSocket_temp.slice(0, 4).join(".");
dataChannelPort=parseInt(dataChannelSocket_temp[4]) * 256 +
int(dataChannelSocket_temp[5]);
//create new Data Socket based on dataChannelSocket and dataChannelSocket port
dataChannelSocket=new Socket(dataChannelIP, dataChannelPort);
dataChannelSocket.addEventListener(ProgressEvent.SOCKET_DATA, receiveData);
}
//a few FTP response codes
switch (String(serverResponse)) {
case "220":
//FTP server ready response
break;
case "331":
//User name okay, need password
break;
case "230":
//User logged in
break;
case "250":
//CWD command successful
break;
case "227":
//Entering Passive Mode (h1,h2,h3,h4,p1,p2).
break;
default:
}
//for more, please see
//http://www.altools.com/image/support/alftp/ALFTP_35_help/
//FTP_response_codes_rfc_959_messages.htm
traceData(ftpResponce);
}
private function receiveData(e:ProgressEvent):void {
dataResponce=dataChannelSocket.readUTFBytes(
dataChannelSocket.bytesAvailable);
traceData("dataChannelSocket_response—>" + dataResponce);
}
private function showError(e:IOErrorEvent):void {
traceData("Error—>" + e.text);
}
private function showSecError(e:SecurityErrorEvent):void {
traceData("SecurityError–>" + e.text);
}
private function onInitialize():void {
Security.loadPolicyFile("http://www.myUrlIsCorrectInMyProgram.com/crossdomain.xml");
}
private function createRemoteFile(fileName:String):void {
if (fileName != null && fileName != "") {
s.writeUTFBytes("STOR " + fileName + "\n");
s.flush();
}
}
private function sendData():void {
fileContents=fileRef.data as ByteArray;
fileSize=fileRef.size;
dataChannelSocket.writeBytes(fileContents, 0, fileSize);
dataChannelSocket.flush();
}
//initialize when application load
private function upLoad():void {
fileRef=new FileReference();
//some eventlistener
fileRef.addEventListener(Event.SELECT, selectEvent);
fileRef.addEventListener(Event.OPEN, onFileOpen);
//this function connects to the ftp server
connect();
//send the username and password
this.userName(user);
this.passWord(pass);
//if you want to change the directory for upload file
this.changeDirectory("/test/"); //directory name
//enter into PASSV Mode
s.writeUTFBytes("PASV\n");
s.flush();
}
private function onFileOpen(event:Event):void {
}
private function traceData(event:Object):void {
var tmp:String="================================\n";
ta.text+=event.toString() + "\n";
ta.verticalScrollPosition+=20;
}
private function ioErrorEvent(event:IOErrorEvent):void {
Alert.show("IOError:" + event.text);
}
private function selectEvent(event:Event):void {
btn_upload.enabled=true;
filename.text=fileRef.name;
fileRef.load();
}
private function uploadFile():void {
createRemoteFile(fileRef.name);
sendData();
}
private function connect():void {
s=new Socket("ftp.myUrlIsCorrectInMyProgram.com", 21);
s.addEventListener(ProgressEvent.SOCKET_DATA, receiveReply);
s.addEventListener(IOErrorEvent.IO_ERROR, showError);
s.addEventListener(SecurityErrorEvent.SECURITY_ERROR, showSecError);
s.addEventListener(Event.CONNECT, onSocketConnect);
s.addEventListener(Event.CLOSE, onSocketClose);
s.addEventListener(Event.ACTIVATE, onSocketAtivate);
}
private function onSocketConnect(evt:Event):void {
//traceData("OnSocketConnect–>"+evt.target.toString());
}
private function onSocketClose(evt:Event):void {
//traceData("onSocketClose–>"+evt.target.toString());
}
private function onSocketAtivate(evt:Event):void {
//traceData("onSocketAtivate–>"+evt.target.toString());
}
private function userName(str:String):void {
sendCommand("USER " + str);
}
private function passWord(str:String):void {
sendCommand("PASS " + str);
}
private function changeDirectory(str:String):void {
sendCommand("CWD " + str);
}
private function sendCommand(arg:String):void {
arg+="\n";
s.writeUTFBytes(arg);
s.flush();
}
]]>
</mx:Script>
...
</mx:Application>
[SWF] /FTP-debug/FTP.swf - 739,099 bytes after decompression
Warning: Domain www.myUrlIsCorrectInMyProgram.com does not specify a meta-policy. Applying default meta-policy 'master-only'. This configuration is deprecated. See http://www.adobe.com/go/strict_policy_files to fix this problem.
Warning: Timeout on xmlsocket://ftp.myUrlIsCorrectInMyProgram.com:843 (at 3 seconds) while waiting for socket policy file. This should not cause any problems, but see http://www.adobe.com/go/strict_policy_files for an explanation.
Warning: [strict] Ignoring policy file at xmlsocket://ftp.myUrlIsCorrectInMyProgram.com:21 due to incorrect syntax. See http://www.adobe.com/go/strict_policy_files to fix this problem.
*** Security Sandbox Violation ***
Connection to ftp.myUrlIsCorrectInMyProgram.com:21 halted - not permitted from http://localhost/FTP-debug/FTP.swf
Error: Request for resource at xmlsocket://ftp.myUrlIsCorrectInMyProgram.com:21 by requestor from http://localhost/FTP-debug/FTP.swf is denied due to lack of policy file permissions.
The "Information" at the URL's listed above is categorically unintelligable to me.
Please, someone help!
I also had the same issue but was able to fix it using the flash policy server that I downloaded from http://www.flash-resources.net/download.html.
I ran this on the same machine that my Tomcat server is installed on and made the call
Security.loadPolicyFile("xmlsocket://:843");
from the application and it worked perfectly. No errors.
I also had the same issue but was able to fix it using the flash policy server that I downloaded from here.
I ran this on the same machine that my Tomcat server is installed on and made the call
Security.loadPolicyFile("xmlsocket://Machine Name:843");
from the application and it worked perfectly. No errors.
Watch the typo around the Machine Name in the last post.
See the crossdomain spec:
http://learn.adobe.com/wiki/download/attachments/64389123/CrossDomain_PolicyFile_Specification.pdf?version=1
This covers the warning you have and can help you get this working.
I'm creating a facelets template for all my company's internal applications. Its appearance is based on the skin which the user selects (like gmail themes).
It makes sense to store the user's preferred skin in a cookie.
My "user-preferences" WAR can see this cookie. However, my other applications are unable to find the cookie. They are on the same domain/subdomain as the user-preferences WAR.
Is there some reason for this?
Here is my bean which is used to create/find the preferred skin. This same file is used in all the projects:
// BackingBeanBase is just a class with convenience methods. Doesn't
// really affect anything here.
public class UserSkinBean extends BackingBeanBase {
private final static String SKIN_COOKIE_NAME = "preferredSkin";
private final static String DEFAULT_SKIN_NAME = "classic";
/**
* Get the name of the user's preferred skin. If this value wasn't set previously,
* it will return a default value.
*
* @return
*/
public String getSkinName() {
Cookie skinNameCookie = findSkinCookie();
if (skinNameCookie == null) {
skinNameCookie = initializeSkinNameCookie(DEFAULT_SKIN_NAME);
addCookie(skinNameCookie);
}
return skinNameCookie.getValue();
}
/**
* Set the skin to the given name. Must be the name of a valid richFaces skin.
*
* @param skinName
*/
public void setSkinName(String skinName) {
if (skinName == null) {
skinName = DEFAULT_SKIN_NAME;
}
Cookie skinNameCookie = findSkinCookie();
if (skinNameCookie == null) {
skinNameCookie = initializeSkinNameCookie(skinName);
}
else {
skinNameCookie.setValue(skinName);
}
addCookie(skinNameCookie);
}
private void addCookie(Cookie skinNameCookie) {
((HttpServletResponse)getFacesContext().getExternalContext().getResponse()).addCookie(skinNameCookie);
}
private Cookie initializeSkinNameCookie(String skinName) {
Cookie ret = new Cookie(SKIN_COOKIE_NAME, skinName);
ret.setComment("The purpose of this cookie is to hold the name of the user's preferred richFaces skin.");
//set the max age to one year.
ret.setMaxAge(60 * 60 * 24 * 365);
ret.setPath("/");
return ret;
}
private Cookie findSkinCookie() {
Cookie[] cookies = ((HttpServletRequest)getFacesContext().getExternalContext().getRequest()).getCookies();
Cookie ret = null;
// getCookies() can return null when the request carries no cookies at all
if (cookies != null) {
for (Cookie cookie : cookies) {
if (cookie.getName().equals(SKIN_COOKIE_NAME)) {
ret = cookie;
break;
}
}
}
return ret;
}
}
Can anyone see what I'm doing wrong?
Update: I've narrowed it down a bit...it works fine in FF, but IE still doesn't like it (of course).
Thanks,
Zack
I think you need to assign a domain/subdomain to the cookies.
For example (note that the domain should start with a dot):
ret.setDomain(".test.com");
ret.setDomain(".test.co.uk");
http://www.apl.jhu.edu/~hall/java/Servlet-Tutorial/Servlet-Tutorial-Cookies.html
I found a solution.
I just used JavaScript on the client side to create the cookies.
This worked fine.
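For reference, a minimal sketch of that client-side approach (the cookie name preferredSkin matches the bean above; the domain value is an assumption and should be replaced with your own):
// Writes the preferred-skin cookie for the whole domain so every WAR can read it.
function savePreferredSkin(skinName) {
  var expires = new Date();
  expires.setFullYear(expires.getFullYear() + 1); // one year, matching the bean's setMaxAge()
  document.cookie = "preferredSkin=" + encodeURIComponent(skinName) +
      "; path=/; expires=" + expires.toUTCString() +
      "; domain=.example.com"; // assumed domain; use your real one
}

savePreferredSkin("classic");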