Why doesn't a while loop inside let work in SML? - functional-programming

I am new to Standard ML and I have a question. When I run the code below without the let-in structure, it runs and gives a result. When I put in the let-in, it shows syntax errors. Can anyone help me?
fun findInd([], size, value, ans, l, h) = ~1
  | findInd(lista, size, value, ans, l, h) =
    let
      val midval = Real.round((real l + real h) / real 2)
      val Nelem = List.nth(lista, midval)
    in
      if l <= h then
        if Nelem <= value then findInd(lista, size, value, midval, midval+1, h)
        else findInd(lista, size, value, ans, l, midval+(~1))
      else
        ans
    end;
let (* <-- this let gives the problem *)
  val s = 0
  val sum = ref s
  val maxlen = 0
  val maxlenptr = ref maxlen
  val counter_start = 0
  val counter = ref counter_start
  val arr = [1, 5, ~58, ~1]
  val presum = [~53, ~52, 1, 6]
  val minInd = [3, 2, 0, 0]
  while !counter < List.length(arr) do (
    sum := !sum + List.nth(arr, !counter);
    if !sum >= 0 then maxlenptr := !counter + 1
    else
      let
        val ind = findInd(presum, List.length(arr), s, ~1, 0, List.length(arr) + (~1))
        val temp = List.nth(minInd, ind)
      in
        if ind <> ~1 andalso temp < counter_start then maxlenptr := Int.max(!maxlenptr, counter_start + (~temp))
        else ()
      end;
    counter := !counter + 1
  );
  val m = !maxlenptr
in (* <--- this in *)
  m
end;

The syntax of let is let <declarations> in <expression> end, and a while loop is not a declaration. You'll need to move it after the in (like you did with the ifs in your other lets).
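For example, here is a minimal sketch of that shape, keeping only a few of the names from the question and leaving out the inner let and the findInd call:

let
  val sum = ref 0
  val maxlenptr = ref 0
  val counter = ref 0
  val arr = [1, 5, ~58, ~1]
in
  (* the while loop is an expression, so it lives after "in",
     sequenced with the final result using ";" *)
  while !counter < List.length arr do (
    sum := !sum + List.nth (arr, !counter);
    if !sum >= 0 then maxlenptr := !counter + 1 else ();
    counter := !counter + 1
  );
  !maxlenptr
end;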

Related

Pascal scale of notation

I'm doing a procedure that reads numbers character by character.
procedure ReadLongint (var success : boolean; var result : longint);
var
  c : char;
  res : longint;
  pos : integer;
begin
  res := 0;
  pos := 0;
  repeat
    read(c);
    pos := pos + 1
  until (c <> ' ') and (c <> #10);
  while (c <> ' ') and (c <> #10) do
  begin
    if (c < '0') or (c > '9') then
    begin
      writeln('Unexpected ''', c, ''' in pos: ', pos);
      readln;
      success := false;
      exit
    end;
    res := res*10 + ord(c) - ord('0');
    read(c);
    pos := pos + 1
  end;
  result := res;
  success := true
end;
I'm trying to extend it with the ability to select any number system up to base 36.
procedure ReadLongint (var success : boolean; var result : longint; var notation : char);
var
  c : char;
  res : longint;
  pos : integer;
begin
  res := 0;
  pos := 0;
  repeat
    read(c);
    pos := pos + 1
  until (c <> ' ') and (c <> #10);
  while (c <> ' ') and (c <> #10) do
  begin
    if (notation > #48) and (notation < #58) then
    begin
      res := res*10 + ord(c) - ord('0');
    end;
    if (notation > #64) and (notation < #91) then
    begin
      res := res*10 + ord(c) - ord('0'); { ????????? }
    end;
    read(c);
    pos := pos + 1;
  end;
  result := res;
  success := true
end;
When choosing a base from 2 to 10, the reading algorithm is the same: res := res*10 + ord(c) - ord('0');
But how do I correctly read numbers in systems that use the letter digits A to Z?
Please advise.
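One way to handle this is to map each character to its digit value ('0'..'9' to 0..9, 'A'..'Z' to 10..35), check that the value is legal for the chosen base, and multiply by the base instead of 10. A minimal sketch, assuming the base is available as an integer variable base in the range 2..36 (in the question it arrives as a char parameter and would have to be converted first):

function DigitValue(c : char) : integer;
begin
  if (c >= '0') and (c <= '9') then
    DigitValue := ord(c) - ord('0')
  else if (c >= 'A') and (c <= 'Z') then
    DigitValue := ord(c) - ord('A') + 10
  else
    DigitValue := -1   { not a digit in any base up to 36 }
end;

Inside the reading loop the accumulation then becomes (with d declared as a local integer):

d := DigitValue(c);
if (d < 0) or (d >= base) then
begin
  writeln('Unexpected ''', c, ''' in pos: ', pos);
  success := false;
  exit
end;
res := res*base + d;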

How to create a Pascal triangle?

I am having difficulty displaying all the output results. This is the code:
DEF VAR INPUTAN AS INTEGER.
DEF VAR i AS INTEGER.
DEF VAR j AS INTEGER.
DEF VAR a AS INTEGER.
DEF VAR rows AS INT.
DEF VAR pascal AS CHAR FORMAT "x(25)".
SET INPUTAN.
a = 1.
REPEAT i = 0 TO INPUTAN:
  rows = i.
  DISPLAY rows.
  REPEAT j = 0 TO i:
    IF j = 0 OR j = i THEN DO:
      a = 1.
    END.
    ELSE
      a = a * (i + 1 - j) / j.
    pascal = STRING(a).
    display a.
  END.
END.

DEF VAR INPUTAN AS INTEGER.
DEF VAR i AS INTEGER.
DEF VAR j AS INTEGER.
DEF VAR a AS INTEGER.
DEF VAR rows AS INT.
DEF VAR pascal AS CHAR.
SET INPUTAN.
a = 1.
REPEAT i = 0 TO INPUTAN:
  rows = i.
  /* DISPLAY rows. */
  REPEAT j = 0 TO i:
    IF j = 0 OR j = i THEN DO:
      a = 1.
    END.
    ELSE
      a = a * (i + 1 - j) / j.
    IF j = 0 THEN
      pascal = pascal + FILL(" ", INPUTAN - i).
    pascal = pascal + STRING(a) + " ".
    IF j = i THEN
      pascal = pascal + CHR(13).
    /* display a. */
  END.
END.
MESSAGE pascal
  VIEW-AS ALERT-BOX INFO BUTTONS OK.

Error in bernstein_vandermonde (Julia)

The following error occurs. I tried to change the n, but it is not working:
"LoadError: BoundsError: attempt to access 9-element Array{Float64,1}:"
function bernstein_vandermonde( n )
    if n == 1
        v = ones(1, 1);
        return v
    end
    v = zeros( n, n );
    x = linspace( 0, 1, n );
    for i = 1:n
        println("enters the loop")
        v[i,1:n] = bernstein_poly_01(n - 1, x[i])
    end
    return v
end

function bernstein_poly_01( n, x )
    bern = ones(n)
    if n == 0
        bern[1] = 1
    elseif 0 < n
        bern[1] = 1 - x
        bern[2] = x
        for i = 2:n
            bern[i+1] = x*bern[i];
            for j = i-1:-1:1
                bern[j+1] = x*bern[j] + (1 - x)*bern[j+1]
            end
            bern[1] = (1 - x)*bern[1]
        end
    end
    return bern
end
I cannot solve it :(
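Judging from the loop bounds, a likely cause is that a Bernstein basis of degree n has n + 1 polynomials, but bern = ones(n) only allocates n slots, while the inner loop writes bern[i+1] for i up to n. Sizing the vector as n + 1 should avoid the BoundsError; a sketch of that change:

function bernstein_poly_01( n, x )
    bern = ones(n + 1)          # n + 1 coefficients for degree n
    if 0 < n
        bern[1] = 1 - x
        bern[2] = x
        for i = 2:n
            bern[i+1] = x*bern[i]
            for j = i-1:-1:1
                bern[j+1] = x*bern[j] + (1 - x)*bern[j+1]
            end
            bern[1] = (1 - x)*bern[1]
        end
    end
    return bern
end

The caller's assignment v[i,1:n] = bernstein_poly_01(n - 1, x[i]) then receives exactly n values.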

F# closures on mailbox processor threading failure

So I am doing some very CPU-intensive batch computation on books, and I built a tracker to follow the progress of the tasks. I close over a MailboxProcessor, and it all runs fine without parallelization, but when I use Array.Parallel.map or an async workflow, the MailboxProcessor fails. I want to know why.
type timerMessage =
    | Start of int
    | Tick of bool

let timer = MailboxProcessor.Start(fun mbox ->
    let inputloop() = async {
        let progress = ref 0
        let amount = ref 0
        let start = ref System.DateTime.UtcNow
        while true do
            let! msg = mbox.Receive()
            match msg with
            | Start(i) ->
                amount := i
                progress := 0
                start := System.DateTime.UtcNow
            | Tick(b) ->
                if !amount = 0 then ()
                else
                    progress := !progress + 1
                    let el = System.DateTime.UtcNow - !start
                    let eta = int ((el.TotalSeconds/float !progress)*(float (!amount - !progress)))
                    let etas = (int (eta / 3600)).ToString() + ":" + (int ((eta % 3600) / 60)).ToString() + ":" + (eta % 60).ToString()
                    System.Console.Clear()
                    System.Console.Write((!progress).ToString() + "/" + (!amount).ToString() + " Completed [] Estimated Time Remaining:" + etas)
    }
    inputloop() )
let computeBook (author :string) path =
    let rs = ReadToStrings(path)
    let bk = StringsToBook rs
    let mt = createMatrix bk 100 10 //size 100 //span 10
    let res = GetResults mt
    //do stuff
    timer.Post(Tick(true))
    (author,path,res)

let partAopA = //clip head clip foot no word mods
    let lss = seq { for x in processtree do
                        for y in (snd x) do
                            yield ((fst x), y) }
    let ls = Seq.toArray lss //task list
    timer.Post(Start(ls.Length)) //start counter
    let compls = Array.map (fun l -> computeBook (fst l) (snd l) ) ls //Array.Parallel.map fails here, the same as the async below if I put async blocks around the computeBook call
    //let res = compls |> Async.Parallel |> Async.RunSynchronously
    writeResults compls outputfolder |> ignore
    compls

Difficulty solving this with recursive code

I need to find the length of the longest common subsequence.
s and t are strings, and n and m are their lengths. I would like to write recursive code.
This is what I did so far, but I can't make any progress:
def lcs_len_v1(s, t):
    n = len(s)
    m = len(t)
    return lcs_len_rec(s, n, t, m)

def lcs_len_rec(s, size_s, t, size_t):
    cnt = 0
    if size_s == 0 or size_t == 0:
        return 0
    elif s[0] == t[0]:
        cnt = +1
        return cnt, lcs_len_rec(s[1:], len(s[1:]), t[1:], len(t[1:]))
This works:
def lcs(xstr, ystr):
    if not xstr or not ystr:
        return ""
    x, xs, y, ys = xstr[0], xstr[1:], ystr[0], ystr[1:]
    if x == y:
        return x + lcs(xs, ys)
    else:
        return max(lcs(xstr, ys), lcs(xs, ystr), key=len)
print(lcs("AAAABCC","AAAACCB"))
# AAAACC
You should know that a recursive approach will only work with relatively trivial strings; the complexity increases very rapidly with longer strings.
This is my code; how can I use the memoization technique on it?
def lcs_len_v1(s, t):
    n = len(s)
    m = len(t)
    return lcs_len_rec(s, n, t, m)

def lcs_len_rec(s, size_s, t, size_t):
    if size_s == 0 or size_t == 0:
        return 0
    elif s[0] == t[0]:
        cnt = 0
        cnt += 1
        return cnt + lcs_len_rec(s[1:], size_s-1, t[1:], size_t-1)
    else:
        return max(lcs_len_rec(s[1:], size_s-1, t, size_t), lcs_len_rec(s, size_s, t[1:], size_t-1))
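One way to memoize the length version is to recurse on indices instead of slices, so the cache keys stay small; a minimal sketch using functools.lru_cache:

from functools import lru_cache

def lcs_len_memo(s, t):
    @lru_cache(maxsize=None)
    def rec(i, j):
        # length of the LCS of s[i:] and t[j:]
        if i == len(s) or j == len(t):
            return 0
        if s[i] == t[j]:
            return 1 + rec(i + 1, j + 1)
        return max(rec(i + 1, j), rec(i, j + 1))
    return rec(0, 0)

print(lcs_len_memo("AAAABCC", "AAAACCB"))   # 6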
Using the memoization technique, you can run the algorithm even with very long strings. In fact, it is just O(n^2):
def recursiveLCS(table, s1, s2):
    if(table[len(s1)][len(s2)] != False):
        return table[len(s1)][len(s2)]
    elif len(s1) == 0 or len(s2) == 0:
        val = ""
    elif s1[0] == s2[0]:
        val = s1[0] + recursiveLCS(table, s1[1:], s2[1:])
    else:
        res1 = recursiveLCS(table, s1[1:], s2)
        res2 = recursiveLCS(table, s1, s2[1:])
        val = res2
        if len(res1) > len(res2):
            val = res1
    table[len(s1)][len(s2)] = val
    return val

def computeLCS(s1, s2):
    table = [[False for col in range(len(s2) + 1)] for row in range(len(s1) + 1)]
    return recursiveLCS(table, s1, s2)

print computeLCS("testistest", "this_is_a_long_testtest_for_testing_the_algorithm")
Output:
teststest
