CircularProgressIndicator doesn't animate within ComposeView in Fragment - android-fragments

I have the following in a Fragment:
return ComposeView(requireContext()).apply {
    config = ChartsConfig()
    setViewCompositionStrategy(ViewCompositionStrategy.DisposeOnViewTreeLifecycleDestroyed)
    setContent {
        MaterialTheme {
            val progressValue = 1.0f
            val infiniteTransition = rememberInfiniteTransition()
            val progressAnimationValue by infiniteTransition.animateFloat(
                initialValue = 0.0f,
                targetValue = progressValue,
                animationSpec = infiniteRepeatable(
                    animation = tween(durationMillis = 4000, easing = LinearEasing)
                )
            )
            println(progressAnimationValue)
            CircularProgressIndicator(progress = progressAnimationValue)
        }
    }
}
If I try the same composable in a plain Compose test-project activity, it animates just fine. However, it doesn't animate when I put it in a Fragment as shown here.

Related

Issue with VkRenderPass load operation for Skybox rendering

I seem to have another issue when it comes to render passes in Vulkan.
When drawing my scene, I first submit a command buffer that renders the sky, using atmospheric scattering, onto a cubemap, which I then use in my forward pass to draw the sky and sun.
This is the render pass used when drawing the skybox and storing it into a cubemap for sampling:
m_pFrameBuffer = rhi->CreateFrameBuffer();
VkImageView attachment = m_RenderTexture->View();
VkAttachmentDescription attachDesc = CreateAttachmentDescription(
m_RenderTexture->Format(),
VK_IMAGE_LAYOUT_UNDEFINED,
VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
VK_ATTACHMENT_LOAD_OP_CLEAR,
VK_ATTACHMENT_STORE_OP_STORE,
VK_ATTACHMENT_LOAD_OP_DONT_CARE,
VK_ATTACHMENT_STORE_OP_DONT_CARE,
m_RenderTexture->Samples()
);
VkAttachmentReference colorRef = { 0, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL };
std::array<VkSubpassDependency, 2> dependencies;
dependencies[0] = CreateSubPassDependency(
VK_SUBPASS_EXTERNAL,
VK_ACCESS_MEMORY_READ_BIT,
VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT,
0,
VK_ACCESS_COLOR_ATTACHMENT_READ_BIT | VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
VK_DEPENDENCY_BY_REGION_BIT
);
dependencies[1] = CreateSubPassDependency(
0,
VK_ACCESS_COLOR_ATTACHMENT_READ_BIT | VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
VK_SUBPASS_EXTERNAL,
VK_ACCESS_MEMORY_READ_BIT,
VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT,
VK_DEPENDENCY_BY_REGION_BIT
);
VkSubpassDescription subpassDesc = { };
subpassDesc.colorAttachmentCount = 1;
subpassDesc.pColorAttachments = &colorRef;
subpassDesc.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
VkRenderPassCreateInfo renderpassCi = { };
renderpassCi.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
renderpassCi.attachmentCount = 1;
renderpassCi.pAttachments = &attachDesc;
renderpassCi.dependencyCount = static_cast<u32>(dependencies.size());
renderpassCi.pDependencies = dependencies.data();
renderpassCi.subpassCount = 1;
renderpassCi.pSubpasses = &subpassDesc;
VkFramebufferCreateInfo framebufferCi = { };
framebufferCi.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO;
framebufferCi.height = kTextureSize;
framebufferCi.width = kTextureSize;
framebufferCi.attachmentCount = 1;
framebufferCi.layers = 1;
framebufferCi.pAttachments = &attachment;
m_pFrameBuffer->Finalize(framebufferCi, renderpassCi);
After rendering the skybox and storing it into a cubemap, I use the following render pass to sample the sky onto the rendered scene. This pass uses VK_ATTACHMENT_LOAD_OP_LOAD so as not to clear the rendered scene when drawing the skybox onto it:
// Create a renderpass for the pbr overlay.
Texture* pbrColor = gResources().GetRenderTexture(PBRColorAttachStr);
Texture* pbrNormal = gResources().GetRenderTexture(PBRNormalAttachStr);
Texture* pbrPosition = gResources().GetRenderTexture(PBRPositionAttachStr);
Texture* pbrRoughMetal = gResources().GetRenderTexture(PBRRoughMetalAttachStr);
Texture* pbrDepth = gResources().GetRenderTexture(PBRDepthAttachStr);
Texture* RTBright = gResources().GetRenderTexture(RenderTargetBrightStr);
std::array<VkAttachmentDescription, 6> attachmentDescriptions;
VkSubpassDependency dependenciesNative[2];
attachmentDescriptions[0] = CreateAttachmentDescription(
pbrColor->Format(),
VK_IMAGE_LAYOUT_UNDEFINED,
VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
VK_ATTACHMENT_LOAD_OP_LOAD,
VK_ATTACHMENT_STORE_OP_STORE,
VK_ATTACHMENT_LOAD_OP_DONT_CARE,
VK_ATTACHMENT_STORE_OP_DONT_CARE,
pbrColor->Samples()
);
attachmentDescriptions[1] = CreateAttachmentDescription(
pbrNormal->Format(),
VK_IMAGE_LAYOUT_UNDEFINED,
VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
VK_ATTACHMENT_LOAD_OP_LOAD,
VK_ATTACHMENT_STORE_OP_STORE,
VK_ATTACHMENT_LOAD_OP_DONT_CARE,
VK_ATTACHMENT_STORE_OP_DONT_CARE,
pbrNormal->Samples()
);
attachmentDescriptions[2] = CreateAttachmentDescription(
RTBright->Format(),
VK_IMAGE_LAYOUT_UNDEFINED,
VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
VK_ATTACHMENT_LOAD_OP_LOAD,
VK_ATTACHMENT_STORE_OP_STORE,
VK_ATTACHMENT_LOAD_OP_DONT_CARE,
VK_ATTACHMENT_STORE_OP_DONT_CARE,
RTBright->Samples()
);
attachmentDescriptions[3] = CreateAttachmentDescription(
pbrPosition->Format(),
VK_IMAGE_LAYOUT_UNDEFINED,
VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
VK_ATTACHMENT_LOAD_OP_LOAD,
VK_ATTACHMENT_STORE_OP_STORE,
VK_ATTACHMENT_LOAD_OP_DONT_CARE,
VK_ATTACHMENT_STORE_OP_DONT_CARE,
pbrPosition->Samples()
);
attachmentDescriptions[4] = CreateAttachmentDescription(
pbrRoughMetal->Format(),
VK_IMAGE_LAYOUT_UNDEFINED,
VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
VK_ATTACHMENT_LOAD_OP_LOAD,
VK_ATTACHMENT_STORE_OP_STORE,
VK_ATTACHMENT_LOAD_OP_DONT_CARE,
VK_ATTACHMENT_STORE_OP_DONT_CARE,
pbrRoughMetal->Samples()
);
attachmentDescriptions[5] = CreateAttachmentDescription(
pbrDepth->Format(),
VK_IMAGE_LAYOUT_UNDEFINED,
VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,
VK_ATTACHMENT_LOAD_OP_LOAD,
VK_ATTACHMENT_STORE_OP_STORE,
VK_ATTACHMENT_LOAD_OP_DONT_CARE,
VK_ATTACHMENT_STORE_OP_DONT_CARE,
pbrDepth->Samples()
);
dependenciesNative[0] = CreateSubPassDependency(
VK_SUBPASS_EXTERNAL,
VK_ACCESS_MEMORY_READ_BIT,
VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT,
0,
VK_ACCESS_COLOR_ATTACHMENT_READ_BIT | VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
VK_DEPENDENCY_BY_REGION_BIT
);
dependenciesNative[1] = CreateSubPassDependency(
0,
VK_ACCESS_COLOR_ATTACHMENT_READ_BIT | VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
VK_SUBPASS_EXTERNAL,
VK_ACCESS_MEMORY_READ_BIT,
VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT,
VK_DEPENDENCY_BY_REGION_BIT
);
std::array<VkAttachmentReference, 5> attachmentColors;
VkAttachmentReference attachmentDepthRef = { static_cast<u32>(attachmentColors.size()), VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL };
attachmentColors[0].attachment = 0;
attachmentColors[0].layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
attachmentColors[1].attachment = 1;
attachmentColors[1].layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
attachmentColors[2].attachment = 2;
attachmentColors[2].layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
attachmentColors[3].attachment = 3;
attachmentColors[3].layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
attachmentColors[4].attachment = 4;
attachmentColors[4].layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
VkSubpassDescription subpass = {};
subpass.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
subpass.colorAttachmentCount = static_cast<u32>(attachmentColors.size());
subpass.pColorAttachments = attachmentColors.data();
subpass.pDepthStencilAttachment = &attachmentDepthRef;
VkRenderPassCreateInfo renderpassCI = CreateRenderPassInfo(
static_cast<u32>(attachmentDescriptions.size()),
attachmentDescriptions.data(),
2,
dependenciesNative,
1,
&subpass
);
VkResult result =
vkCreateRenderPass(rhi->LogicDevice()->Native(), &renderpassCI, nullptr, &m_SkyboxRenderPass);
This is the command buffer for rendering the sky onto my scene. I submit it after rendering the scene to take advantage of early-Z rejection:
if (m_pSkyboxCmdBuffer) {
m_pRhi->DeviceWaitIdle();
m_pSkyboxCmdBuffer->Reset(VK_COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT);
}
VkCommandBufferBeginInfo beginInfo = { };
beginInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
CommandBuffer* buf = m_pSkyboxCmdBuffer;
FrameBuffer* skyFrameBuffer = gResources().GetFrameBuffer(PBRFrameBufferStr);
GraphicsPipeline* skyPipeline = gResources().GetGraphicsPipeline(SkyboxPipelineStr);
DescriptorSet* global = m_pGlobal->Set();
DescriptorSet* skybox = gResources().GetDescriptorSet(SkyboxDescriptorSetStr);
VkDescriptorSet descriptorSets[] = {
global->Handle(),
skybox->Handle()
};
buf->Begin(beginInfo);
std::array<VkClearValue, 6> clearValues;
clearValues[0].color = { 0.0f, 0.0f, 0.0f, 1.0f };
clearValues[1].color = { 0.0f, 0.0f, 0.0f, 1.0f };
clearValues[2].color = { 0.0f, 0.0f, 0.0f, 1.0f };
clearValues[3].color = { 0.0f, 0.0f, 0.0f, 1.0f };
clearValues[4].color = { 0.0f, 0.0f, 0.0f, 1.0f };
clearValues[5].depthStencil = { 1.0f, 0 };
VkViewport viewport = {};
viewport.height = (r32)m_pWindow->Height();
viewport.width = (r32)m_pWindow->Width();
viewport.minDepth = 0.0f;
viewport.maxDepth = 1.0f;
viewport.y = 0.0f;
viewport.x = 0.0f;
VkRenderPassBeginInfo renderBegin = { };
renderBegin.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO;
renderBegin.framebuffer = skyFrameBuffer->Handle();
renderBegin.renderPass = m_pSky->GetSkyboxRenderPass();
renderBegin.clearValueCount = static_cast<u32>(clearValues.size());
renderBegin.pClearValues = clearValues.data();
renderBegin.renderArea.offset = { 0, 0 };
renderBegin.renderArea.extent = m_pRhi->SwapchainObject()->SwapchainExtent();
// Start the renderpass.
buf->BeginRenderPass(renderBegin, VK_SUBPASS_CONTENTS_INLINE);
buf->SetViewPorts(0, 1, &viewport);
buf->BindPipeline(VK_PIPELINE_BIND_POINT_GRAPHICS, skyPipeline->Pipeline());
buf->BindDescriptorSets(VK_PIPELINE_BIND_POINT_GRAPHICS, skyPipeline->Layout(), 0, 2, descriptorSets, 0, nullptr);
VertexBuffer* vertexbuffer = m_pSky->GetSkyboxVertexBuffer();
IndexBuffer* idxBuffer = m_pSky->GetSkyboxIndexBuffer();
VkDeviceSize offsets[] = { 0 };
VkBuffer vert = vertexbuffer->Handle()->NativeBuffer();
VkBuffer ind = idxBuffer->Handle()->NativeBuffer();
buf->BindVertexBuffers(0 , 1, &vert, offsets);
buf->BindIndexBuffer(ind, 0, VK_INDEX_TYPE_UINT32);
buf->DrawIndexed(idxBuffer->IndexCount(), 1, 0, 0, 0);
buf->EndRenderPass();
buf->End();
Finally, I submit it inside my rendering function:
// TODO(): Need to clean this up.
VkCommandBuffer offscreenCmd = m_Offscreen._CmdBuffers[m_Offscreen._CurrCmdBufferIndex]->Handle();
VkCommandBuffer skyBuffers[] = { m_Offscreen._CmdBuffers[m_Offscreen._CurrCmdBufferIndex]->Handle(), m_pSky->CmdBuffer()->Handle() };
VkSemaphore skyWaits[] = { m_Offscreen._Semaphore->Handle(), m_pSky->SignalSemaphore()->Handle() };
VkSemaphore waitSemas[] = { m_pRhi->SwapchainObject()->ImageAvailableSemaphore() };
VkSemaphore signalSemas[] = { m_Offscreen._Semaphore->Handle() };
VkSemaphore shadowSignal[] = { m_Offscreen._ShadowSema->Handle() };
VkPipelineStageFlags waitFlags[] = { VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT };
VkSubmitInfo offscreenSI = {};
offscreenSI.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
offscreenSI.pCommandBuffers = &offscreenCmd;
offscreenSI.commandBufferCount = 1;
offscreenSI.signalSemaphoreCount = 1;
offscreenSI.pSignalSemaphores = signalSemas;
offscreenSI.waitSemaphoreCount = 1;
offscreenSI.pWaitSemaphores = waitSemas;
offscreenSI.pWaitDstStageMask = waitFlags;
VkSubmitInfo skyboxSI = offscreenSI;
VkSemaphore skyboxWaits[] = { m_Offscreen._Semaphore->Handle() };
VkSemaphore skyboxSignal[] = { m_SkyboxFinished->Handle() };
VkCommandBuffer skyboxCmd = m_pSkyboxCmdBuffer->Handle();
skyboxSI.commandBufferCount = 1;
skyboxSI.pCommandBuffers = &skyboxCmd;
skyboxSI.pSignalSemaphores = skyboxSignal;
skyboxSI.pWaitSemaphores = skyboxWaits;
VkSubmitInfo hdrSI = offscreenSI;
VkSemaphore hdrWaits[] = { m_SkyboxFinished->Handle() };
VkSemaphore hdrSignal[] = { m_HDR._Semaphore->Handle() };
VkCommandBuffer hdrCmd = m_HDR._CmdBuffers[m_HDR._CurrCmdBufferIndex]->Handle();
hdrSI.pCommandBuffers = &hdrCmd;
hdrSI.pSignalSemaphores = hdrSignal;
hdrSI.pWaitSemaphores = hdrWaits;
VkSemaphore waitSemaphores = m_HDR._Semaphore->Handle();
if (!m_HDR._Enabled) waitSemaphores = m_Offscreen._Semaphore->Handle();
// Update materials before rendering the frame.
UpdateMaterials();
// begin frame. This is where we start our render process per frame.
BeginFrame();
while (m_Offscreen._CmdBuffers[m_HDR._CurrCmdBufferIndex]->Recording() || !m_pRhi->CmdBuffersComplete()) {}
// Render shadow map here. Primary shadow map is our concern.
if (m_pLights->PrimaryShadowEnabled()) {
VkCommandBuffer shadowbuf[] = { m_Offscreen._ShadowCmdBuffers[m_Offscreen._CurrCmdBufferIndex]->Handle() };
VkSubmitInfo shadowSubmit = { };
shadowSubmit.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
shadowSubmit.pCommandBuffers = shadowbuf;
shadowSubmit.commandBufferCount = 1;
shadowSubmit.signalSemaphoreCount = 1;
shadowSubmit.waitSemaphoreCount = 1;
shadowSubmit.pWaitSemaphores = waitSemas;
shadowSubmit.pSignalSemaphores = shadowSignal;
shadowSubmit.pWaitDstStageMask = waitFlags;
// Submit shadow rendering.
m_pRhi->GraphicsSubmit(shadowSubmit);
offscreenSI.pWaitSemaphores = shadowSignal;
}
// Check if sky needs to update it's cubemap.
if (m_pSky->NeedsRendering()) {
skyboxSI.waitSemaphoreCount = 2;
skyboxSI.pWaitSemaphores = skyWaits;
offscreenSI.commandBufferCount = 2;
offscreenSI.signalSemaphoreCount = 2;
offscreenSI.pSignalSemaphores = skyWaits;
offscreenSI.pCommandBuffers = skyBuffers;
m_pSky->MarkClean();
}
// Offscreen PBR Forward Rendering Pass.
m_pRhi->GraphicsSubmit(offscreenSI);
// Render Sky onto our render textures.
m_pRhi->GraphicsSubmit(skyboxSI);
// High Dynamic Range and Gamma Pass.
if (m_HDR._Enabled) m_pRhi->GraphicsSubmit(hdrSI);
// Before calling this cmd buffer, we want to submit our offscreen buffer first, then
// sent our signal to our swapchain cmd buffers.
// TODO(): We want to hold off on signalling GraphicsFinished Semaphore, and instead
// have it signal the SignalUI semaphore instead. UI Overlay will be the one to use
// GraphicsFinished Semaphore to signal end of frame rendering.
VkSemaphore signal = m_pRhi->GraphicsFinishedSemaphore();
VkSemaphore uiSig = m_pUI->Signal()->Handle();
m_pRhi->SubmitCurrSwapchainCmdBuffer(1, &waitSemaphores, 1, &signal);
// Render the Overlay.
RenderOverlay();
EndFrame();
On an Nvidia GTX 870M the results look as expected.
However, on Intel HD Graphics 620 I get this screenshot (which I can't embed here because it's too big): https://github.com/CheezBoiger/Recluse-Game/blob/master/Regression/Shaders/ForwardPass.png
It seems as though the scene from previous frames is left uncleared in the color attachment, as if the skybox were being rendered onto a separate surface that is then used instead, yet the attachment should be cleared at the beginning of every frame...
If I replace VK_ATTACHMENT_LOAD_OP_LOAD with VK_ATTACHMENT_LOAD_OP_CLEAR the stale image goes away, but then only the skybox is rendered... Is my render pass missing something that Intel hardware requires, or am I going about drawing the skybox onto my rendered scene all wrong?
Any help is much appreciated.
* Update *
Problem fixed, with the solution by @Ekzuzy below.
Final image on Intel hardware after the fix:
You always provide UNDEFINED as the initial layout in all your render passes and for all attachments. A layout transition from UNDEFINED to any other layout doesn't guarantee that the image contents are preserved. So if you create a render pass with LOAD as the load op, you need to provide the actual layout the given image has just before the render pass starts. This applies to other layout transitions as well (through memory barriers).
As for clears: some images should be cleared at the beginning of a frame or render pass. For those you can leave UNDEFINED as the initial layout, but you should change the load op to CLEAR.
As to why this works on Nvidia and not on Intel: layout transitions don't have much effect on Nvidia's hardware, but they are important on Intel's platforms (and on AMD's too). So skipping (or setting improper) layout transitions, even though it violates the specification, may still appear to work on Nvidia. But don't do that just because it works; such an approach is invalid, and future platforms, even from the same vendor, may behave differently.
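To make that concrete, here is a rough sketch (not part of the original answer) of how the first colour attachment of the overlay pass above could be declared once its real pre-pass layout is known. It assumes the forward pass leaves pbrColor in VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL and reuses the question's CreateAttachmentDescription helper with the same argument order:
// Sketch only: the initial layout now matches the layout the image actually has
// before this pass (assumed COLOR_ATTACHMENT_OPTIMAL here), so LOAD_OP_LOAD
// is guaranteed to keep its contents.
attachmentDescriptions[0] = CreateAttachmentDescription(
pbrColor->Format(),
VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, // actual layout before the pass, not UNDEFINED
VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
VK_ATTACHMENT_LOAD_OP_LOAD,
VK_ATTACHMENT_STORE_OP_STORE,
VK_ATTACHMENT_LOAD_OP_DONT_CARE,
VK_ATTACHMENT_STORE_OP_DONT_CARE,
pbrColor->Samples()
);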

Get physicsBody position on collision

I seem to have some problems with getting the position of a physicsBody.
I want to set the position of a physicsBody (bodyA) to the position of another physicsBody (bodyB) when that one collides with a third one.
I tried using _bodyA.position = _bodyB.position; but this doesn't work.
The problem is that the physicsBody I want to get the position of is moving around all the time. So I might need a method that checks the position of the physicsBody every frame and, when the physicsBody collides, returns the CGPoint.
This might be a simple task, but I can't figure it out. Thanks for the help!
I put together a quick sample project for you to take a look at. You can fine-tune the code to fit your exact needs, but it shows how to do what you asked.
Tap anywhere on the screen to launch the blue square into the red square. Once the two make contact, the green square's position will change to the red square's position.
An alternative to the way I wrote the code is to add the sprites to an array, but that all depends on how your code is set up.
Side note - you cannot update a sprite's position in the didBeginContact: section because it will not work. That is why I created the CGPoint to store the position and then update objectC during the next cycle in update:
Here is the code:
#import "MyScene.h"
typedef NS_OPTIONS(uint32_t, CNPhysicsCategory)
{
Category1 = 1 << 0,
Category2 = 1 << 1,
Category3 = 1 << 2,
};
@interface MyScene()<SKPhysicsContactDelegate>
@end
@implementation MyScene
{
SKSpriteNode *objectA;
SKSpriteNode *objectB;
SKSpriteNode *objectC;
CGPoint newPositionForObjectC;
}
-(id)initWithSize:(CGSize)size
{
if (self = [super initWithSize:size])
{
self.physicsWorld.contactDelegate = self;
objectA = [SKSpriteNode spriteNodeWithColor:[SKColor redColor] size:CGSizeMake(30, 30)];
objectA.position = CGPointMake(110, 20);
objectA.name = @"objectA";
objectA.physicsBody = [SKPhysicsBody bodyWithRectangleOfSize:objectA.size];
objectA.physicsBody.categoryBitMask = Category1;
objectA.physicsBody.collisionBitMask = Category2;
objectA.physicsBody.contactTestBitMask = Category2;
objectA.physicsBody.affectedByGravity = NO;
[self addChild:objectA];
objectB = [SKSpriteNode spriteNodeWithColor:[SKColor blueColor] size:CGSizeMake(30, 30)];
objectB.position = CGPointMake(100, 200);
objectB.name = @"objectB";
objectB.physicsBody = [SKPhysicsBody bodyWithRectangleOfSize:objectB.size];
objectB.physicsBody.categoryBitMask = Category2;
objectB.physicsBody.collisionBitMask = Category1;
objectB.physicsBody.contactTestBitMask = Category1;
objectB.physicsBody.affectedByGravity = NO;
[self addChild:objectB];
objectC = [SKSpriteNode spriteNodeWithColor:[SKColor greenColor] size:CGSizeMake(30, 30)];
objectC.position = CGPointMake(250, 250);
objectC.name = @"objectC";
objectC.physicsBody = [SKPhysicsBody bodyWithRectangleOfSize:objectC.size];
objectC.physicsBody.categoryBitMask = Category3;
objectC.physicsBody.collisionBitMask = 0;
objectC.physicsBody.contactTestBitMask = 0;
objectC.physicsBody.affectedByGravity = NO;
[self addChild:objectC];
newPositionForObjectC = CGPointMake(0, 0);
}
return self;
}
- (void)didBeginContact:(SKPhysicsContact *)contact
{
uint32_t collision = (contact.bodyA.categoryBitMask | contact.bodyB.categoryBitMask);
if (collision == (Category1 | Category2))
{
newPositionForObjectC = CGPointMake(contact.bodyA.node.position.x, contact.bodyA.node.position.y);
}
}
-(void)touchesBegan:(NSSet *)touches withEvent:(UIEvent *)event
{
[objectA.physicsBody applyImpulse:CGVectorMake(0, 5)];
}
-(void)update:(CFTimeInterval)currentTime
{
if(newPositionForObjectC.x > 0)
{
objectC.position = newPositionForObjectC;
newPositionForObjectC = CGPointMake(0, 0);
}
}
@end

Conditional Line Graph using Open Flash Charts

I am using Open Flash Charts v2. I have been trying to make a conditional line graph, but I couldn't find any straightforward way, example, or class for producing conditional charts.
Example of Conditional Graph
So I tried to emulate a conditional graph: I made a separate Line object for the values above the limit range, and that line is used to overlap the plotted line.
This technique works somewhat OK, but there are problems with it:
How to color or place the conditionally colored line exactly above the limit.
How to remove the tooltip and dot from the limit line.
Tooltips for both the conditional line (red) and the plotted line (green) are shown; I only need the tooltip of the green line.
Conditional Line Graph Problem illustrated
Source code (C#):
var chart = new OpenFlashChart.OpenFlashChart();
var data1 = new List<double?> { 1, 3, 4, 5, 2, 1, 6, 7 };//>4=
var overlap = new List<double?> { null, null, 4, 5, null, null, null, null };
var overlap2 = new List<double?> { null, null, null, null, null, null, 6, 7 };
var limitData = new List<double?> { 4, 4, 4, 4, 4, 4, 4, 4 };
var line1 = new Line();
line1.Values = data1;
//line1.HaloSize = 0;
line1.Width = 2;
line1.DotSize = 5;
line1.DotStyleType.Tip = "#x_label#<br>#val#";
line1.Colour = "#37c855";
line1.Tooltip = "#val#";
var overLine = new Line();
overLine.Values = overlap;
//overLine.HaloSize = 0;
overLine.Width = 2;
overLine.DotSize = 5;
overLine.DotStyleType.Tip = "#x_label#<br>#val#";
overLine.Colour = "#d81417";
overLine.Tooltip = "#val#";
var overLine2 = new Line();
overLine2.Values = overlap2;
//overLine2.HaloSize = 0;
overLine2.Width = 2;
overLine2.DotSize = 5;
//overLine2.DotStyleType.Tip = "#x_label#<br>#val#";
//overLine2.DotStyleType.Type = DotType.DOT;
overLine2.Colour = "#d81417";
overLine2.Tooltip = "#val#";
var limit = new Line();
limit.Values = limitData;
limit.Width = 2;
limit.Colour = "#ff0000";
limit.HaloSize = -1;
limit.DotSize = -1;
// limit.DotStyleType.Tip = "";
limit.DotStyleType.Type = null;
//limit.Tooltip = "";
chart.AddElement(line1);
chart.AddElement(overLine);
chart.AddElement(overLine2);
chart.AddElement(limit);
chart.Y_Legend = new Legend("Experiment");
chart.Title = new Title("Conditional Line Graph");
chart.Y_Axis.SetRange(0, 10);
chart.X_Axis.Labels.Color = "#e43456";
chart.X_Axis.Steps = 4;
chart.Tooltip = new ToolTip("#val#");
chart.Tooltip.Shadow = true;
chart.Tooltip.Colour = "#e43456";
chart.Tooltip.MouseStyle = ToolTipStyle.CLOSEST;
Response.Clear();
Response.CacheControl = "no-cache";
Response.Write(chart.ToPrettyString());
Response.End();
Note:
I have already downloaded the OFC (Open Flash Charts) source. If I modify the OFC Line.as source, how would I generate the JSON for the changed graph? I'm currently using the .NET library for the JSON generation for OFC charts, so please let me know this as well.
Update:
I have modified the source code on the advice of David Mears; I'm using FlashDevelop for ActionScript.
P.S. I'm open to ideas if another library can do this job.
If you don't mind a little rebuilding, you can get the source of OFC here and modify the Line.solid_line() method in open-flash-chart/charts/Line.as to do this fairly easily.
In order to set the extra chart details through JSON using the .NET library, you'll also have to modify OpenFlashChart/LineBase.cs to add alternative colour and boundary properties. I'm not hugely familiar with .NET, but based on the existing properties you might add something like this:
private double boundary;
private string altcolour;
[JsonProperty("boundary")]
public virtual double Boundary
{
set { this.boundary = value; }
get { return this.boundary; }
}
[JsonProperty("alt-colour")]
public virtual string AltColour
{
set { this.altcolour = value; }
get { return this.altcolour; }
}
Then I believe the following should work in Line.as:
public function solid_line(): void {
var first:Boolean = true;
var i:Number;
var tmp:Sprite;
var x:Number;
var y:Number;
var last_e:Element;
var ratio:Number;
for ( i=0; i < this.numChildren; i++ ) {
// Step through every child object.
tmp = this.getChildAt(i) as Sprite;
// Only include data Elements, ignoring extra children such as line masks.
if( tmp is Element )
{
var e:Element = tmp as Element;
if( first )
{
if (this.props.get('alt-colour') != Number.NEGATIVE_INFINITY) {
if (e._y >= this.props.get_colour('boundary'))
{
// Line starts below boundary, set alt line colour.
this.graphics.lineStyle( this.props.get_colour('width'), this.props.get_colour('alt-colour') );
}
else
{
// Line starts above boundary, set normal line colour.
this.graphics.lineStyle( this.props.get_colour('width'), this.props.get_colour('colour') );
}
}
// Move to the first point.
this.graphics.moveTo(e.x, e.y);
x = e.x;
y = e.y;
first = false;
}
else
{
if (this.props.get('alt-colour') != Number.NEGATIVE_INFINITY) {
if (last_e._y < this.props.get_colour('boundary') && e._y >= this.props.get_colour('boundary'))
{
// Line passes below boundary. Draw first section and switch to alt colour.
ratio = (this.props.get_colour('boundary') - last_e._y) / (e._y - last_e._y);
this.graphics.lineTo(last_e.x + (e.x - last_e.x) * ratio, last_e.y + (e.y - last_e.y) * ratio);
this.graphics.lineStyle( this.props.get_colour('width'), this.props.get_colour('alt-colour') );
}
else if (last_e._y >= this.props.get_colour('boundary') && e._y < this.props.get_colour('boundary'))
{
// Line passes above boundary. Draw first section and switch to normal colour.
ratio = (this.props.get_colour('boundary') - last_e._y) / (e._y - last_e._y);
this.graphics.lineTo(last_e.x + (e.x - last_e.x) * ratio, last_e.y + (e.y - last_e.y) * ratio);
this.graphics.lineStyle( this.props.get_colour('width'), this.props.get_colour('colour') );
}
}
// Draw a line to the next point.
this.graphics.lineTo(e.x, e.y);
}
last_e = e;
}
}
if ( this.props.get('loop') ) {
// close the line loop (radar charts)
this.graphics.lineTo(x, y);
}
}
With the new open-flash-chart.swf, you should be able to just set your new properties on line1:
line1.Boundary = 4;
line1.AltColour = "#d81417";

How to put an infowindow on a polyline in Google Maps v3?

I want to know how to show an InfoWindow on a polyline using the Google Maps API v3, and how to make it appear in the middle of the polyline.
First you will need to calculate the middle/center of the polyline. This has been discussed and answered here:
https://stackoverflow.com/a/9090409/787921
Then you will have to open the InfoWindow:
var infowindow = new google.maps.InfoWindow({
    content: "infowindow text content"
});
infowindow.setPosition(midLatLng);
infowindow.open(map);
Find the middle point and set your custom view:
func showPath(polyStr :String){
polyline?.map = nil
mapView1.reloadInputViews()
pathDraw = GMSPath(fromEncodedPath: polyStr)!
polyline = GMSPolyline(path: pathDraw)
polyline?.strokeWidth = 4.0
polyline?.strokeColor = UIColor.init(red: 247/255.0, green: 55/255.0, blue: 76/255.0, alpha: 1.0)
polyline?.map = mapView1
// Use the coordinate roughly halfway along the path as the marker position.
let pointsCount = pathDraw.count()
let midCoordinate = pathDraw.coordinate(at: pointsCount / 2)
DispatchQueue.main.async
{
self.addMarkerPin(coordinate: midCoordinate, distance: "10 min")
}
}
func addMarkerPin(coordinate: CLLocationCoordinate2D, distance: String)
{
let marker = GMSMarker()
marker.position = coordinate
PathTimeView = PathInfoView.loadFromNib() // here I load a xib file; you can use your own custom view
let DynamicView = PathTimeView
DynamicView.timelbl.text = distance
UIGraphicsBeginImageContextWithOptions(DynamicView.frame.size, false, UIScreen.main.scale)
DynamicView.layer.render(in: UIGraphicsGetCurrentContext()!)
let imageConverted: UIImage = UIGraphicsGetImageFromCurrentImageContext()!
UIGraphicsEndImageContext()
marker.icon = imageConverted
marker.map = self.mapView1
marker.infoWindowAnchor = CGPoint(x: -1900 , y: -2000)
}
First you should get the center/middle of the polyline; this is what works for me:
private fun centerPos(points: MutableList<LatLng>): LatLng {
val middleDistance = SphericalUtil.computeLength(points).div(2)
return extrapolate(points, points.first(), middleDistance.toFloat()) ?: points[0]
}
private fun extrapolate(path: List<LatLng>, origin: LatLng, distance: Float): LatLng? {
var extrapolated: LatLng? = null
if (!PolyUtil.isLocationOnPath(
origin,
path,
false,
1.0
)
) { // If the location is not on path non geodesic, 1 meter tolerance
return null
}
var accDistance = 0f
var foundStart = false
val segment: MutableList<LatLng> = ArrayList()
for (i in 0 until path.size - 1) {
val segmentStart = path[i]
val segmentEnd = path[i + 1]
segment.clear()
segment.add(segmentStart)
segment.add(segmentEnd)
var currentDistance = 0.0
if (!foundStart) {
if (PolyUtil.isLocationOnPath(origin, segment, false, 1.0)) {
foundStart = true
currentDistance = SphericalUtil.computeDistanceBetween(origin, segmentEnd)
if (currentDistance > distance) {
val heading = SphericalUtil.computeHeading(origin, segmentEnd)
extrapolated = SphericalUtil.computeOffset(
origin,
(distance - accDistance).toDouble(),
heading
)
break
}
}
} else {
currentDistance = SphericalUtil.computeDistanceBetween(segmentStart, segmentEnd)
if (currentDistance + accDistance > distance) {
val heading = SphericalUtil.computeHeading(segmentStart, segmentEnd)
extrapolated = SphericalUtil.computeOffset(
segmentStart,
(distance - accDistance).toDouble(),
heading
)
break
}
}
accDistance += currentDistance.toFloat()
}
return extrapolated
}
Then you can add the info window in the normal way for your platform, since that part differs from platform to platform.
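For example, on Android (Kotlin, Maps SDK) a minimal sketch could look like the following; it assumes map is the GoogleMap, points holds the polyline's coordinates, and centerPos is the helper above, with hypothetical title and snippet text:
// Sketch only: drop a marker at the computed midpoint and show its default info window.
val midpoint = centerPos(points)
val marker = map.addMarker(
MarkerOptions()
.position(midpoint)
.title("Route") // hypothetical title text
.snippet("10 min") // hypothetical snippet text
)
marker?.showInfoWindow()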

ActionScript 2 character selection

I haven't been able to make a character selection in ActionScript 2. What would be an example where, if I click on a button, a movie clip appears in the frame?
Frame 1:
movieClip1._alpha = 0;
movieClip1.stop();
movieClip2._alpha = 0;
movieClip2.stop();
movieClip3._alpha = 0;
movieClip3.stop();
button1.onPress = function() {
movieClip1._alpha = 100;
movieClip1.play();
}
button2.onPress = function() {
movieClip2._alpha = 100;
movieClip2.play();
}
button3.onPress = function() {
movieClip3._alpha = 100;
movieClip3.play();
}
Try something like the code below. I haven't tested it, so it probably won't compile as-is, but it will be very close. Basically, put this on a single empty frame on the main timeline. Make sure your button and character movie clips all have export settings and linkage identifiers set. Modify the code below and see what happens.
var numButtons:Number = 10; //number of buttons you want
var buttonMovieClipName:String = "button"; //linkage identifier of button
var startX:Number = 10; //start x position
var startY:Number = 500; //start y position
var dist:Number = 10; //distance between buttons
var characters:Array = ["A","B","C","D"]; //linkage names of your characters
var currentChar:String = null; //linkage/instance name of the character currently on stage
for(var i:Number = 0; i < numButtons; i++)
{
this.attachMovie("button", "button"+i, this.getNextHighestDepth());
this["button"+i]._x = startX + (i*(dist+this["button"+i]._width]));
this["button"+i]._y = startY;
this["button"+i].character = characters[i];
this["button"+i].onPress = displayCharacter;
}
function displayCharacter():Void
{
var par = this._parent;
//remove previous character on stage
if(currentChar != null)
{
removeMovieClip(par[currentChar]);
}
par.attachMovie(this.character, this.character, par.getNextHighestDepth()); //atach character
par[this.character]._x = 400; //set to whatever
par[this.character]._y = 300; //set to whatever
currentChar = this.character; //set current character to this
}
