Not able to fetch data based on custom dimensions - google-analytics

Custom dimension
public class GetUsersData {
private static final String APPLICATION_NAME = "Wtr-web";
private static final JsonFactory JSON_FACTORY = JacksonFactory.getDefaultInstance();
private static final String KEY_FILE_LOCATION = "D:\\ga3\\credentials.json";
// private static final String USER_VIEW_ID = "281667139"; // userIdView
private static final String CLIENT_VIEW_ID = "281540591"; // all page
// private static Map<String, String> convertedUserMap = new LinkedHashMap<>();
public UsersData getUsersData() throws Exception {
// Create the DateRange object.
DateRange dateRange = new DateRange();
dateRange.setStartDate("30DaysAgo");
dateRange.setEndDate("today");
return getStatistics(initializeAnalyticsReporting(), dateRange, CLIENT_VIEW_ID);
}
private AnalyticsReporting initializeAnalyticsReporting() throws GeneralSecurityException, Exception {
HttpTransport httpTransport = GoogleNetHttpTransport.newTrustedTransport();
GoogleCredential credential = GoogleCredential.fromStream(new FileInputStream(KEY_FILE_LOCATION))
.createScoped(Collections.singletonList(AnalyticsReportingScopes.ANALYTICS_READONLY));
// Construct the Analytics Reporting service object.
return new AnalyticsReporting.Builder(httpTransport, JSON_FACTORY, credential)
.setApplicationName(APPLICATION_NAME).build();
}
private UsersData getStatistics(AnalyticsReporting service, DateRange dateRange, String userViewId) {
// Create the Metrics object.
Metric visits = new Metric().setExpression("ga:visits");
Dimension UserId = new Dimension().setName("ga:UserID");
// Create the ReportRequest object.
ReportRequest request = new ReportRequest().setViewId(userViewId).setDateRanges(Arrays.asList(dateRange))
.setMetrics(Arrays.asList(visits))
.setDimensions(Arrays.asList(UserId));
// Create the GetReportsRequest object.
GetReportsRequest getReport = new GetReportsRequest().setReportRequests(Arrays.asList(request));
GetReportsResponse response = null;
try {
response = service.reports().batchGet(getReport).execute();
} catch (Exception e) {
e.printStackTrace();
System.out.println(" data not found");
}
if (response != null) {
for (Report report : response.getReports()) {
ColumnHeader columnHeader = report.getColumnHeader();
ClassInfo classInfo = report.getClassInfo();
List<ReportRow> rows = report.getData().getRows();
if (rows == null) {
System.out.println("No data found for the request.");
continue; // skip this report to avoid a NullPointerException on the loop below
}
for (ReportRow row : rows) {
List<String> dimensions = row.getDimensions();
List<DateRangeValues> metrics = row.getMetrics();
}
}
}
return new UsersData();
}
}
com.google.api.client.googleapis.json.GoogleJsonResponseException: 400 Bad Request
POST https://analyticsreporting.googleapis.com/v4/reports:batchGet
{
"code" : 400,
"errors" : [ {
"domain" : "global",
"message" : "Unknown dimension(s): ga:UserID\nFor details see https://developers.google.com/analytics/devguides/reporting/core/dimsmets.",
"reason" : "badRequest"
} ],
"message" : "Unknown dimension(s): ga:UserID\nFor details see https://developers.google.com/analytics/devguides/reporting/core/dimsmets.",
"status" : "INVALID_ARGUMENT"
}
UserID is my custom dimension. How can I resolve this issue?
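In the Reporting API, custom dimensions are not addressed by their display name; they are referenced by index as ga:dimensionN (the index is shown under Admin > Custom Definitions > Custom Dimensions). A minimal sketch of the change, assuming UserID was created as custom dimension index 1 (swap in whatever index your property actually shows):
Dimension userId = new Dimension().setName("ga:dimension1"); // not "ga:UserID"
ReportRequest request = new ReportRequest()
.setViewId(userViewId)
.setDateRanges(Arrays.asList(dateRange))
.setMetrics(Arrays.asList(new Metric().setExpression("ga:visits")))
.setDimensions(Arrays.asList(userId));
The display name only exists in the Analytics UI; the API rejects it with exactly the "Unknown dimension(s)" error shown above.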

Related

How to send a zipped file to S3 bucket from Apex?

Folks,
I am trying to move data to S3 from Salesforce using an Apex class. I have been told by the data manager to send the data in zip/gzip format to the S3 bucket for storage cost savings.
I have simply tried request.setCompressed(true);, as I've read it compresses the body before sending it to the endpoint. Code below:
HttpRequest request = new HttpRequest();
request.setEndpoint('callout:'+DATA_NAMED_CRED+'/'+URL+'/'+generateUniqueTimeStampforSuffix());
request.setMethod('PUT');
request.setBody(JSON.serialize(data));
request.setCompressed(true);
request.setHeader('Content-Type','application/json');
But no matter what I always receive this:
<Error><Code>XAmzContentSHA256Mismatch</Code><Message>The provided 'x-amz-content-sha256' header does not match what was computed.</Message><ClientComputedContentSHA256>fd31b2b9115ef77e8076b896cb336d21d8f66947210ffcc9c4d1971b2be3bbbc</ClientComputedContentSHA256><S3ComputedContentSHA256>1e7f2115e60132afed9e61132aa41c3224c6e305ad9f820e6893364d7257ab8d</S3ComputedContentSHA256>
I have tried multiple headers too, like setting the content type to gzip/zip, etc.
Any pointers in the right direction would be appreciated.
I had a good amount of headaches attempting to do a similar thing. I feel your pain.
The following code has worked for us using lambda functions; you can try modifying it and see what happens.
public class AwsApiGateway {
// Things we need to know about the service. Set these values in init()
String host, payloadSha256;
String resource;
String service = 'execute-api';
String region;
public Url endpoint;
String accessKey;
String stage;
string secretKey;
HttpMethod method = HttpMethod.XGET;
// Remember to set "payload" here if you need to specify a body
// payload = Blob.valueOf('some-text-i-want-to-send');
// This method helps prevent leaking secret key,
// as it is never serialized
// Url endpoint;
// HttpMethod method;
Blob payload;
// Not used externally, so we hide these values
Blob signingKey;
DateTime requestTime;
Map<String, String> queryParams = new map<string,string>(), headerParams = new map<string,string>();
void init(){
if (payload == null) payload = Blob.valueOf('');
requestTime = DateTime.now();
createSigningKey(secretKey);
}
public AwsApiGateway(String resource){
// AWS_LAMBDA_STAGE, AWS_REGION, AWS_ENDPOINT, AWS_ACCESS_KEY and AWS_SECRET_KEY
// are assumed to be constants defined elsewhere (e.g. custom settings or custom metadata)
this.stage = AWS_LAMBDA_STAGE;
this.resource = '/' + stage + '/' + resource;
this.region = AWS_REGION;
this.endpoint = new Url(AWS_ENDPOINT);
this.accessKey = AWS_ACCESS_KEY;
this.secretKey = AWS_SECRET_KEY;
}
// Make sure we can't misspell methods
public enum HttpMethod { XGET, XPUT, XHEAD, XOPTIONS, XDELETE, XPOST }
public void setMethod (HttpMethod method){
this.method = method;
}
public void setPayload (string payload){
this.payload = Blob.valueOf(payload);
}
// Add a header
public void setHeader(String key, String value) {
headerParams.put(key.toLowerCase(), value);
}
// Add a query param
public void setQueryParam(String key, String value) {
queryParams.put(key.toLowerCase(), uriEncode(value));
}
// Create a canonical query string (used during signing)
String createCanonicalQueryString() {
String[] results = new String[0], keys = new List<String>(queryParams.keySet());
keys.sort();
for(String key: keys) {
results.add(key+'='+queryParams.get(key));
}
return String.join(results, '&');
}
// Create the canonical headers (used for signing)
String createCanonicalHeaders(String[] keys) {
keys.addAll(headerParams.keySet());
keys.sort();
String[] results = new String[0];
for(String key: keys) {
results.add(key+':'+headerParams.get(key));
}
return String.join(results, '\n')+'\n';
}
// Create the entire canonical request
String createCanonicalRequest(String[] headerKeys) {
return String.join(
new String[] {
method.name().removeStart('X'), // METHOD
new Url(endPoint, resource).getPath(), // RESOURCE
createCanonicalQueryString(), // CANONICAL QUERY STRING
createCanonicalHeaders(headerKeys), // CANONICAL HEADERS
String.join(headerKeys, ';'), // SIGNED HEADERS
payloadSha256 // SHA256 PAYLOAD
},
'\n'
);
}
// We have to replace ~ and " " correctly, or we'll break AWS on those two characters
string uriEncode(String value) {
return value==null? null: EncodingUtil.urlEncode(value, 'utf-8').replaceAll('%7E','~').replaceAll('\\+','%20');
}
// Create the entire string to sign
String createStringToSign(String[] signedHeaders) {
String result = createCanonicalRequest(signedHeaders);
return String.join(
new String[] {
'AWS4-HMAC-SHA256',
headerParams.get('date'),
String.join(new String[] { requestTime.formatGMT('yyyyMMdd'), region, service, 'aws4_request' },'/'),
EncodingUtil.convertToHex(Crypto.generateDigest('sha256', Blob.valueof(result)))
},
'\n'
);
}
// Create our signing key
void createSigningKey(String secretKey) {
signingKey = Crypto.generateMac('hmacSHA256', Blob.valueOf('aws4_request'),
Crypto.generateMac('hmacSHA256', Blob.valueOf(service),
Crypto.generateMac('hmacSHA256', Blob.valueOf(region),
Crypto.generateMac('hmacSHA256', Blob.valueOf(requestTime.formatGMT('yyyyMMdd')), Blob.valueOf('AWS4'+secretKey))
)
)
);
}
// Create all of the bits and pieces using all utility functions above
public HttpRequest createRequest() {
init();
payloadSha256 = EncodingUtil.convertToHex(Crypto.generateDigest('sha-256', payload));
setHeader('date', requestTime.formatGMT('yyyyMMdd\'T\'HHmmss\'Z\''));
if(host == null) {
host = endpoint.getHost();
}
setHeader('host', host);
HttpRequest request = new HttpRequest();
request.setMethod(method.name().removeStart('X'));
if(payload.size() > 0) {
setHeader('Content-Length', String.valueOf(payload.size()));
request.setBodyAsBlob(payload);
}
String finalEndpoint = new Url(endpoint, resource).toExternalForm(),
queryString = createCanonicalQueryString();
if(queryString != '') {
finalEndpoint += '?'+queryString;
}
request.setEndpoint(finalEndpoint);
for(String key: headerParams.keySet()) {
request.setHeader(key, headerParams.get(key));
}
String[] headerKeys = new String[0];
String stringToSign = createStringToSign(headerKeys);
request.setHeader(
'Authorization',
String.format(
'AWS4-HMAC-SHA256 Credential={0}, SignedHeaders={1},Signature={2}',
new String[] {
String.join(new String[] { accessKey, requestTime.formatGMT('yyyyMMdd'), region, service, 'aws4_request' },'/'),
String.join(headerKeys,';'), EncodingUtil.convertToHex(Crypto.generateMac('hmacSHA256', Blob.valueOf(stringToSign), signingKey))}
));
system.debug(json.serializePretty(request.getEndpoint()));
return request;
}
// Actually perform the request, and log the response body if the status code is not in the expected set
public HttpResponse sendRequest(Set<Integer> validCodes) {
HttpResponse response = new Http().send(createRequest());
if(!validCodes.contains(response.getStatusCode())) {
system.debug(json.deserializeUntyped(response.getBody()));
}
return response;
}
// Same as above, but assume that only 200 is valid
// This method exists because most of the time, 200 is what we expect
public HttpResponse sendRequest() {
return sendRequest(new Set<Integer> { 200 });
}
// TEST METHODS
public static string getEndpoint(string attribute){
AwsApiGateway api = new AwsApiGateway(attribute);
return api.createRequest().getEndpoint();
}
public static string getEndpoint(string attribute, map<string, string> params){
AwsApiGateway api = new AwsApiGateway(attribute);
for (string key: params.keySet()){
api.setQueryParam(key, params.get(key));
}
return api.createRequest().getEndpoint();
}
public class EndpointConfig {
string resource;
string attribute;
list<object> items;
map<string,string> params;
public EndpointConfig(string resource, string attribute, list<object> items){
this.items = items;
this.resource = resource;
this.attribute = attribute;
}
public EndpointConfig setQueryParams(map<string,string> parameters){
params = parameters;
return this;
}
public string endpoint(){
if (params == null){
return getEndpoint(resource);
} else return getEndpoint(resource + '/' + attribute, params);
}
public SingleRequestMock mockResponse(){
return new SingleRequestMock(200, 'OK', json.serialize(items), null);
}
}
}
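For what it's worth, a hypothetical usage sketch for the class above, using only its public API (the resource name, the payload variable and the accepted status codes are placeholders, not values from the original code):
AwsApiGateway api = new AwsApiGateway('my-resource'); // placeholder resource
api.setMethod(AwsApiGateway.HttpMethod.XPUT);
api.setHeader('content-type', 'application/json');
api.setPayload(JSON.serialize(data)); // 'data' = whatever you are exporting
HttpResponse response = api.sendRequest(new Set<Integer>{ 200, 201 });
As for the XAmzContentSHA256Mismatch in the question: that error is S3 saying the body it received does not hash to the x-amz-content-sha256 value it was given, which is what you would expect if the body is altered (for example compressed) after the signature has been computed.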

Xamarin Essentials Preferences and saving list

I am trying to save values with Xamarin Essentials Preferences, but my SavedList is always null and then I get this unhandled exception:
Newtonsoft.Json.JsonReaderException: Error parsing boolean value. Path '', line 1, position 1. I am only passing a string value to the saved list.
private void ExecuteMultiPageCommand(bool value)
{
var recognitionProviderSettings = new RecognitionProviderSettings
{SettingFields = new List<SettingField>()};
var set = new SettingField()
{
ProviderSettingId = "test"
};
AddToList(set.ProviderSettingId);
NotifyPropertyChanged("IsMultiPage");
}
public static class Preference
{
public static List<string> SavedList
{
get
{
var savedList = Deserialize<List<string>>(Preferences.Get(nameof(SavedList), "test"));
return savedList ?? new List<string>();
}
set
{
var serializedList = Serialize(value);
Preferences.Set(nameof(SavedList), serializedList);
}
}
static T Deserialize<T>(string serializedObject) => JsonConvert.DeserializeObject<T>(serializedObject);
static string Serialize<T>(T objectToSerialize) => JsonConvert.SerializeObject(objectToSerialize);
}
void AddToList(string text)
{
var savedList = new List<string>(Preference.SavedList) {text};
Preference.SavedList = savedList;
}

How to use tempdata to return error message

I am trying to use TempData to return messages, but it gives an error:
InvalidOperationException: The 'Microsoft.AspNetCore.Mvc.ViewFeatures.Internal.TempDataSerializer' cannot serialize an object of type
I am already using
services.AddMvc().AddSessionStateTempDataProvider();
app.UseSession()
services.AddSession(options =>
{
// Set a short timeout for easy testing.
options.IdleTimeout = TimeSpan.FromSeconds(10);
options.Cookie.HttpOnly = true;
});
Here is my shared FlashMessages.cshtml:
@using EnduroMotors.ViewModels
@{
var errorMessages = TempData["_error_messages"] as List<FlashMessageModel> ?? new List<FlashMessageModel>();
var warningMessages = TempData["_warning_messages"] as List<FlashMessageModel> ?? new List<FlashMessageModel>();
var successMessages = TempData["_success_messages"] as List<FlashMessageModel> ?? new List<FlashMessageModel>();
var infoMessages = TempData["_info_messages"] as List<FlashMessageModel> ?? new List<FlashMessageModel>();
}
Here is my view model:
FlashMessageModel
public class FlashMessageModel
{
public string Title { get; set; }
public string Message { get; set; }
}
And here is its use in the controller:
Controller
protected void ShowSuccessMessage(string message, string title = "Success!")
{
var messages = (List<FlashMessageModel>)TempData["_success_messages"] ?? new List<FlashMessageModel>();
messages.Add(new FlashMessageModel
{
Title = title,
Message = message
});
TempData["_success_messages"] = messages;
}
I call it like this:
ShowSuccessMessage("You have completed.");
It should show the success message in the Index view via @{ Html.RenderPartial("FlashMessages"); }, but instead it gives:
InvalidOperationException: The 'Microsoft.AspNetCore.Mvc.ViewFeatures.Internal.TempDataSerializer' cannot serialize an object of type 'EnduroMotors.ViewModels.FlashMessageModel'.
Microsoft.AspNetCore.Mvc.ViewFeatures.Internal.TempDataSerializer.EnsureObjectCanBeSerialized(object item)
TempData serialises objects to strings for storage. It supports string, int and boolean types natively. If you want to store more complex types, you have to serialise (and deserialise) them yourself. JSON is the recommended format. The following extension methods use the JSON.NET JsonConvert static methods to do this:
public static class TempDataExtensions
{
public static void Set<T>(this ITempDataDictionary tempData, string key, T value) where T : class
{
tempData[key] = JsonConvert.SerializeObject(value);
}
public static T Get<T>(this ITempDataDictionary tempData, string key) where T : class
{
tempData.TryGetValue(key, out object o);
return o == null ? null : JsonConvert.DeserializeObject<T>((string)o);
}
}
You can read more about this here: https://www.learnrazorpages.com/razor-pages/tempdata#limitations
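A rough sketch of how the controller helper and the partial could use these extensions instead of casting TempData directly (key and type names taken from the question):
// In ShowSuccessMessage:
var messages = TempData.Get<List<FlashMessageModel>>("_success_messages") ?? new List<FlashMessageModel>();
messages.Add(new FlashMessageModel { Title = title, Message = message });
TempData.Set("_success_messages", messages);
// In FlashMessages.cshtml:
var successMessages = TempData.Get<List<FlashMessageModel>>("_success_messages") ?? new List<FlashMessageModel>();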

Aggregation of records from Kinesis Stream every 1 min

I am trying to write a Flink program to process a Kinesis Stream. The Kinesis stream comes from AWS DynamoDB stream and represents inserts made in DynamoDB table.
Each record in the stream can contain multiple insert records; the number of insert records is variable (it can vary from 1 to 10).
I want to group all the insert records from all the streams within an interval of 1 min and sum the impression count (impressionCount) field:
[
{
"country":"NL",
"userOS":"mac",
"createdOn":"2017-08-02 16:22:17.135600",
"trafficType":"D",
"affiliateId":"87",
"placement":"4",
"offerId":"999",
"advertiserId":"139",
"impressionCount":"1",
"uniqueOfferCount":"0"
},
{
"country":"NL",
"userOS":"mac",
"createdOn":"2017-08-02 16:22:17.135600",
"trafficType":"D",
"affiliateId":"85",
"placement":"4",
"offerId":"688",
"advertiserId":"139",
"impressionCount":"1",
"uniqueOfferCount":"0"
}
]
My code:
DataStream<List> kinesisStream = env.addSource(new FlinkKinesisConsumer<>(
"Impressions-Stream", new RawImpressionLogSchema(), consumerConfig));
/** CLASS: RawImpressionLogSchema **/
public class RawImpressionLogSchema implements DeserializationSchema<List> {
@Override
public List<RawImpressionLogRecord> deserialize(byte[] bytes) {
return RawImpressionLogRecord.parseImpressionLog(bytes);
}
@Override
public boolean isEndOfStream(List event) {
return false;
}
@Override
public TypeInformation<List> getProducedType() {
return TypeExtractor.getForClass(List.class);
}
}
/** parse Method **/
public static List<RawImpressionLogRecord> parseImpressionLog(
byte[] impressionLogBytes) {
JsonReader jsonReader = new JsonReader(new InputStreamReader(
new ByteArrayInputStream(impressionLogBytes)));
JsonElement jsonElement = Streams.parse(jsonReader);
if (jsonElement == null) {
throw new IllegalArgumentException(
"Event does not define a eventName field: "
+ new String(impressionLogBytes));
} else {
Type listType = new TypeToken<ArrayList<RawImpressionLogRecord>>(){}.getType();
return gson.fromJson(jsonElement, listType);
}
}
I was able to parse the input and create the kinesisStream. I wanted to know: is this the correct way, and how do I achieve the aggregation?
Also, once I have the DataStream, how can I apply map/filter/groupBy functions on the List stream?
I am new to Flink and any help would be appreciated.
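On the "is this the correct way" part: the schema works, but it declares the raw List type, so Flink only knows it produces a List of Objects. A typed variant is a small improvement; a sketch, assuming RawImpressionLogRecord is your record POJO:
public class RawImpressionLogSchema implements DeserializationSchema<List<RawImpressionLogRecord>> {
@Override
public List<RawImpressionLogRecord> deserialize(byte[] bytes) {
return RawImpressionLogRecord.parseImpressionLog(bytes);
}
@Override
public boolean isEndOfStream(List<RawImpressionLogRecord> event) {
return false;
}
@Override
public TypeInformation<List<RawImpressionLogRecord>> getProducedType() {
// TypeHint preserves the generic element type instead of a raw List
return TypeInformation.of(new TypeHint<List<RawImpressionLogRecord>>() {});
}
}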
Update
I tried to come up with the code below to solve the above use case, but somehow the reduce function is not getting called. Any idea what is wrong in the code below?
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);
DataStream<List<ImpressionLogRecord>> rawRecords = env.addSource(new ImpressionLogDataSourceFunction("C:\\LogFiles\\input.txt"));
DataStream<ImpressionLogRecord> impressionLogDataStream = rawRecords
.flatMap(new Splitter())
.assignTimestampsAndWatermarks(
new BoundedOutOfOrdernessTimestampExtractor<ImpressionLogRecord>(Time.seconds(5)) {
@Override
public long extractTimestamp(
ImpressionLogRecord element) {
return element.getCreatedOn().atZone(ZoneOffset.systemDefault()).toInstant().toEpochMilli();
}
}
);
//impressionLogDataStream.print();
KeyedStream<ImpressionLogRecord, String> keyedImpressionLogDataStream = impressionLogDataStream
.keyBy(impressionLogRecordForKey -> {
StringBuffer groupByKey = new StringBuffer();
groupByKey.append(impressionLogRecordForKey.getCreatedOn().toString().substring(0, 16));
groupByKey.append("_");
groupByKey.append(impressionLogRecordForKey.getOfferId());
groupByKey.append("_");
groupByKey.append(impressionLogRecordForKey.getAdvertiserId());
groupByKey.append("_");
groupByKey.append(impressionLogRecordForKey.getAffiliateId());
groupByKey.append("_");
groupByKey.append(impressionLogRecordForKey.getCountry());
groupByKey.append("_");
groupByKey.append(impressionLogRecordForKey.getPlacement());
groupByKey.append("_");
groupByKey.append(impressionLogRecordForKey.getTrafficType());
groupByKey.append("_");
groupByKey.append(impressionLogRecordForKey.getUserOS());
System.out.println("Call to Group By Function===================" + groupByKey);
return groupByKey.toString();
});
//keyedImpressionLogDataStream.print();
DataStream<ImpressionLogRecord> aggImpressionRecord = keyedImpressionLogDataStream
.timeWindow(Time.minutes(5))
.reduce((prevLogRecord, currentLogRecord) -> {
System.out.println("Calling Reduce Function-------------------------");
ImpressionLogRecord aggregatedImpressionLog = new ImpressionLogRecord();
aggregatedImpressionLog.setOfferId(prevLogRecord.getOfferId());
aggregatedImpressionLog.setCreatedOn(prevLogRecord.getCreatedOn().truncatedTo(ChronoUnit.MINUTES));
aggregatedImpressionLog.setAdvertiserId(prevLogRecord.getAdvertiserId());
aggregatedImpressionLog.setAffiliateId(prevLogRecord.getAffiliateId());
aggregatedImpressionLog.setCountry(prevLogRecord.getCountry());
aggregatedImpressionLog.setPlacement(prevLogRecord.getPlacement());
aggregatedImpressionLog.setTrafficType(prevLogRecord.getTrafficType());
aggregatedImpressionLog.setUserOS(prevLogRecord.getUserOS());
aggregatedImpressionLog.setImpressionCount(prevLogRecord.getImpressionCount() + currentLogRecord.getImpressionCount());
aggregatedImpressionLog.setUniqueOfferCount(prevLogRecord.getUniqueOfferCount() + currentLogRecord.getUniqueOfferCount());
return aggregatedImpressionLog;
});
aggImpressionRecord.print();
Working Code
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);
DataStream<List<ImpressionLogRecord>> rawRecords = env.addSource(new ImpressionLogDataSourceFunction("C:\\LogFiles\\input.txt"));
//This method converts the DataStream of List<ImpressionLogRecords> into a single stream of ImpressionLogRecords.
//Also assigns timestamp to each record in the stream
DataStream<ImpressionLogRecord> impressionLogDataStream = rawRecords
.flatMap(new RecordSplitter())
.assignTimestampsAndWatermarks(
new BoundedOutOfOrdernessTimestampExtractor<ImpressionLogRecord>(Time.seconds(5)) {
@Override
public long extractTimestamp(
ImpressionLogRecord element) {
return element.getCreatedOn().atZone(ZoneOffset.systemDefault()).toInstant().toEpochMilli();
}
}
);
//This method groups the records in the stream by a user defined key.
KeyedStream<ImpressionLogRecord, String> keyedImpressionLogDataStream = impressionLogDataStream
.keyBy(impressionLogRecordForKey -> {
StringBuffer groupByKey = new StringBuffer();
groupByKey.append(impressionLogRecordForKey.getCreatedOn().toString().substring(0, 16));
groupByKey.append("_");
groupByKey.append(impressionLogRecordForKey.getOfferId());
groupByKey.append("_");
groupByKey.append(impressionLogRecordForKey.getAdvertiserId());
groupByKey.append("_");
groupByKey.append(impressionLogRecordForKey.getAffiliateId());
groupByKey.append("_");
groupByKey.append(impressionLogRecordForKey.getCountry());
groupByKey.append("_");
groupByKey.append(impressionLogRecordForKey.getPlacement());
groupByKey.append("_");
groupByKey.append(impressionLogRecordForKey.getTrafficType());
groupByKey.append("_");
groupByKey.append(impressionLogRecordForKey.getUserOS());
return groupByKey.toString();
});
//This method aggregates the grouped records every 1 min and calculates the sum of impression count and unique offer count.
DataStream<ImpressionLogRecord> aggImpressionRecord = keyedImpressionLogDataStream
.timeWindow(Time.minutes(1))
.reduce((prevLogRecord, currentLogRecord) -> {
ImpressionLogRecord aggregatedImpressionLog = new ImpressionLogRecord();
aggregatedImpressionLog.setOfferId(prevLogRecord.getOfferId());
aggregatedImpressionLog.setCreatedOn(prevLogRecord.getCreatedOn().truncatedTo(ChronoUnit.MINUTES));
aggregatedImpressionLog.setAdvertiserId(prevLogRecord.getAdvertiserId());
aggregatedImpressionLog.setAffiliateId(prevLogRecord.getAffiliateId());
aggregatedImpressionLog.setCountry(prevLogRecord.getCountry());
aggregatedImpressionLog.setPlacement(prevLogRecord.getPlacement());
aggregatedImpressionLog.setTrafficType(prevLogRecord.getTrafficType());
aggregatedImpressionLog.setUserOS(prevLogRecord.getUserOS());
aggregatedImpressionLog.setImpressionCount(prevLogRecord.getImpressionCount() + currentLogRecord.getImpressionCount());
aggregatedImpressionLog.setUniqueOfferCount(prevLogRecord.getUniqueOfferCount() + currentLogRecord.getUniqueOfferCount());
return aggregatedImpressionLog;
});
aggImpressionRecord.print();
aggImpressionRecord.addSink(new ImpressionLogDataSink());
env.execute();
}
public static class RecordSplitter
implements
FlatMapFunction<List<ImpressionLogRecord>, ImpressionLogRecord> {
@Override
public void flatMap(List<ImpressionLogRecord> rawImpressionRecords,
Collector<ImpressionLogRecord> impressionLogRecordCollector)
throws Exception {
for (int i = 0; i < rawImpressionRecords.size(); i++) {
impressionLogRecordCollector.collect(rawImpressionRecords.get(i));
}
}
}

Web API Typeless OData Service with OWIN self-hosting returns 406 Not Acceptable

I'm trying to set up a Web API Typeless OData Service with OWIN self-hosting... =)
But why is it not working? :~(
This is some code I have partially extracted from all kinds of examples out there...
public class Startup
{
public void Configuration(IAppBuilder appBuilder)
{
var config = new HttpConfiguration();
config.Routes.MapHttpRoute("DefaultApi", "api/{controller}/{id}",
new { id = RouteParameter.Optional });
appBuilder.UseWebApi(config);
}
}
public class Program
{
public static IEdmModel Model = GetEdmModel();
static void Main(string[] args)
{
using (WebApp.Start<Startup>("http://localhost:8080"))
{
Console.WriteLine("Running...");
Console.ReadLine();
}
}
public static IEdmModel GetEdmModel()
{
var model = new EdmModel();
// Create and add product entity type.
var product = new EdmEntityType("NS", "Product");
product.AddKeys(product.AddStructuralProperty("Id", EdmPrimitiveTypeKind.Int32));
product.AddStructuralProperty("Name", EdmPrimitiveTypeKind.String);
product.AddStructuralProperty("Price", EdmPrimitiveTypeKind.Double);
model.AddElement(product);
// Create and add category entity type.
var category = new EdmEntityType("NS", "Category");
category.AddKeys(category.AddStructuralProperty("Id", EdmPrimitiveTypeKind.Int32));
category.AddStructuralProperty("Name", EdmPrimitiveTypeKind.String);
model.AddElement(category);
// Set navigation from product to category.
var propertyInfo = new EdmNavigationPropertyInfo();
propertyInfo.Name = "Category";
propertyInfo.TargetMultiplicity = EdmMultiplicity.One;
propertyInfo.Target = category;
var productCategory = product.AddUnidirectionalNavigation(propertyInfo);
// Create and add entity container.
var container = new EdmEntityContainer("NS", "DefaultContainer");
model.AddElement(container);
// Create and add entity set for product and category.
var products = container.AddEntitySet("Products", product);
var categories = container.AddEntitySet("Categories", category);
products.AddNavigationTarget(productCategory, categories);
return model;
}
}
public class ProductsController : ODataController
{
private static readonly IQueryable<IEdmEntityObject> Products = Enumerable.Range(0, 10).Select(i =>
{
var productType = (IEdmEntityType)Program.Model.FindType("NS.Product");
var categoryType = (IEdmEntityTypeReference)productType.FindProperty("Category").Type;
var product = new EdmEntityObject(productType);
product.TrySetPropertyValue("Id", i);
product.TrySetPropertyValue("Name", "Product " + i);
product.TrySetPropertyValue("Price", i + 0.01);
var category = new EdmEntityObject(categoryType);
category.TrySetPropertyValue("Id", i % 5);
category.TrySetPropertyValue("Name", "Category " + (i % 5));
product.TrySetPropertyValue("Category", category);
return product;
}).AsQueryable();
public EdmEntityObjectCollection Get()
{
// Get Edm type from request.
var path = this.Request.GetODataPath();
var edmType = path.EdmType;
Contract.Assert(edmType.TypeKind == EdmTypeKind.Collection);
var collectionType = edmType as IEdmCollectionType;
var entityType = collectionType.ElementType.Definition as IEdmEntityType;
var model = Request.GetEdmModel();
var queryContext = new ODataQueryContext(model, entityType);
var queryOptions = new ODataQueryOptions(queryContext, Request);
// Apply the query option on the IQueryable here.
return new EdmEntityObjectCollection(new EdmCollectionTypeReference(collectionType, false), Products.ToList());
}
public IEdmEntityObject GetProduct(int key)
{
object id;
var product = Products.Single(p => HasId(p, key));
return product;
}
public IEdmEntityObject GetCategoryFromProduct(int key)
{
object id;
var product = Products.Single(p => HasId(p, key));
object category;
if (product.TryGetPropertyValue("Category", out category))
{
return (IEdmEntityObject)category;
}
return null;
}
public IEdmEntityObject Post(IEdmEntityObject entity)
{
// Get Edm type from request.
var path = Request.GetODataPath();
var edmType = path.EdmType;
Contract.Assert(edmType.TypeKind == EdmTypeKind.Collection);
var entityType = (edmType as IEdmCollectionType).ElementType.AsEntity();
// Do something with the entity object here.
return entity;
}
private bool HasId(IEdmEntityObject product, int key)
{
object id;
return product.TryGetPropertyValue("Id", out id) && (int)id == key;
}
}
The result I get is:
{StatusCode: 406, ReasonPhrase: 'Not Acceptable', Version: 1.1, Content: System.Net.Http.StreamContent, Headers:
{
Date: Mon, 12 May 2014 18:08:25 GMT
Server: Microsoft-HTTPAPI/2.0
Content-Length: 0
}}
From running this:
var client = new HttpClient();
var response = client.GetAsync("http://localhost:8080/api/Products").Result;
If you are using OData V4, you need to make a change in your controller:
Old:
using System.Web.Http.OData;
New:
using System.Web.OData;
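One more thing that is easy to miss with the typeless approach: the Startup above only maps a plain Web API route, so the request may never go through the OData pipeline at all, which also surfaces as 406 Not Acceptable. A sketch of a Startup that maps the EDM model on an OData route (route name and prefix are arbitrary here, and this assumes the Web API OData v4 package, System.Web.OData):
using System.Web.OData.Extensions;
public class Startup
{
public void Configuration(IAppBuilder appBuilder)
{
var config = new HttpConfiguration();
// Requests such as http://localhost:8080/odata/Products now go through the OData pipeline.
config.MapODataServiceRoute("odata", "odata", Program.GetEdmModel());
appBuilder.UseWebApi(config);
}
}
With that route in place, the test client would call http://localhost:8080/odata/Products instead of /api/Products.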
