Commit db3b0110 authored by Michael Hanus

addtypes removed, browser sources added from package

parent 4555c50a
@@ -4,6 +4,11 @@
Curry_Main_Goal.curry
*.agdai
*_cache
browser/.cpm/packages/addtypes-0.0.1
browser/.cpm/packages/cass-0.0.1
browser/.cpm/packages/cass-analysis-0.0.4
browser/.cpm/packages/importusage-0.0.1
browser/.cpm/packages/showflatcurry-0.0.1
optimize/.cpm/packages/cass-0.0.1
optimize/.cpm/packages/cass-analysis-0.0.4
@@ -13,8 +18,6 @@ browser/BrowserGUI
browser/GenInt
browser/ShowFlatCurry
browser/SourceProgGUI
casc/CASC
casc/Docs/curry-style-guide.html
CASS/cass
CASS/cass_worker
cpm/src/CPM.Main
@@ -23,7 +26,6 @@ curry2js/Curry2JS
currypp/Main
currycheck/CurryCheck
currydoc/CurryDoc
currytest/CurryTest
importcalls/ImportCalls
optimize/BindingOpt
runcurry/RunCurry
......
@@ -28,10 +28,6 @@ all: $(make_TOOLDIRS)
###########################################################################
# Define dependencies between the different tools:
make_browser: | make_analysis make_CASS make_addtypes make_importcalls \
              make_currydoc # avoid conflicts in analysis
	@$(MAKE) now_$@
make_analysis:
	@$(MAKE) now_$@
......
# Makefile for generating AddTypes tool
TOOL = $(BINDIR)/$(CURRYSYSTEM)-addtypes
.PHONY: all compile install clean uninstall
all: install
compile: AddTypes
install: compile
	rm -f $(TOOL) $(BINDIR)/addtypes
	cd $(BINDIR) && ln -s ../currytools/addtypes/AddTypes $(notdir $(TOOL))
clean:
	$(CLEANCURRY)
	rm -f AddTypes
uninstall: clean
	rm -f $(TOOL)
AddTypes: AddTypes.curry $(LIBDIR)/AbstractCurry/*.curry \
          $(LIBDIR)/List.curry $(LIBDIR)/AllSolutions.curry
	$(REPL) $(REPL_OPTS) :load AddTypes :save :quit
# curry-addtypes - A tool to add missing type signatures in a Curry program
This package contains the tool `curry-addtypes` which adds
missing type signatures to top-level operations in a Curry module.
Moreover, it contains a library to process strings containing
Curry source code and classify them into a few standard categories.
## Installing the tool
The tool can be directly installed by the command
> cpm installbin addtypes
This installs the executable `curry-addtypes` in the bin directory of CPM.
## Using the tool
If the bin directory of CPM (default: `~/.cpm/bin`) is in your path,
execute the tool with the Curry program where type signatures should
be added, e.g.,
> curry-addtypes LazyProgram
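
For illustration (the module name and its contents are hypothetical and not
part of this package): if `LazyProgram.curry` contains an operation without
a type signature, such as

    inc x = x + 1

then `curry-addtypes LazyProgram` adds the inferred signature in front of
the operation:

    inc :: Int -> Int
    inc x = x + 1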
{
"name": "addtypes",
"version": "0.0.1",
"author": "Bernd Brassel <bbr@informatik.uni-kiel.de>",
"maintainer": "Michael Hanus <mh@informatik.uni-kiel.de>",
"synopsis": "A tool to add missing type signatures in a Curry program",
"dependencies": {
},
"description":
"This package contains a tool which adds missing type signatures
to top-level operations in a Curry module.
Moreover, it contains a library to process strings containing
Curry source code and classify them into a few standard categories",
"exportedModules": [ "AddTypes", "CurryStringClassifier" ],
"executable": { "name": "curry-addtypes",
"main": "AddTypes"
},
"source": {
"git": "https://git.ps.informatik.uni-kiel.de/curry-packages/addtypes.git",
"tag": "$version"
}
}
----------------------------------------------------------------------
--- The Curry string classifier is a simple tool to process strings containing
--- Curry source code. The source string is classified into the following
--- categories:
---
--- * moduleHead - module interface, imports, operators
---
--- * code - the part where the actual program is defined
---
--- * big comment - parts enclosed in {- ... -}
---
--- * small comment - from "--" to the end of a line
---
--- * text - a string, i.e. text enclosed in "..."
---
--- * letter - the given string is the representation of a character
---
--- * meta - containing information for meta programming
---
--- For an example of how to use the state scanner, cf. addtypes, the tool
--- that adds missing type signatures to a given program.
---
--- @author Bernd Brassel
--- @version April 2005
--- @category meta
----------------------------------------------------------------------
module CurryStringClassifier
(Tokens,Token(..), scan, plainCode, unscan,
isSmallComment, isBigComment, isComment, isText, isLetter,
isCode, isModuleHead, isMeta, readScan,testScan)
where
import Char(isDigit,isSpace)
--- The different categories to classify the source code.
data Token = SmallComment String
           | BigComment String
           | Text String
           | Letter String
           | Code String
           | ModuleHead String
           | Meta String
type Tokens = [Token]
--- Test for category "SmallComment".
isSmallComment x = case x of
                     SmallComment _ -> True
                     _              -> False
--- Test for category "BigComment".
isBigComment x = case x of
                   BigComment _ -> True
                   _            -> False
--- Test whether the given token is a comment (big or small).
isComment x = isSmallComment x || isBigComment x
--- Test for category "Text" (String).
isText x = case x of
             Text _ -> True
             _      -> False
--- Test for category "Letter" (Char).
isLetter x = case x of
               Letter _ -> True
               _        -> False
--- Test for category "Code".
isCode x = case x of
             Code _ -> True
             _      -> False
--- Test for category "ModuleHead", i.e., imports and operator declarations.
isModuleHead x = case x of
                   ModuleHead _ -> True
                   _            -> False
--- Test for category "Meta", i.e., code between `{+` and `+}`.
isMeta x = case x of
             Meta _ -> True
             _      -> False
--- Applies a transformation to the Code tokens only and leaves all
--- other tokens unchanged.
weaveIntoCode :: (Tokens -> Tokens) -> Tokens -> Tokens
weaveIntoCode f ts = let (cs,ncs) = unweaveCode ts in weave (f cs,ncs)
--- Splits a token list into its Code tokens and the remaining tokens.
unweaveCode :: Tokens -> (Tokens,Tokens)
unweaveCode [] = ([],[])
unweaveCode (t:ts) = let (cs,ncs) = unweaveCode ts
                     in if isCode t then (t:cs,ncs) else (cs,t:ncs)
--- Interleaves the code and non-code tokens again.
weave xys = case xys of
              ([],[])     -> []
              ([],[y])    -> [y]
              ([x],[])    -> [x]
              (x:xs,y:ys) -> x:y:weave (xs,ys)
--- Divides the given string into the seven categories described above.
--- For applications it is important to know whether a given part of
--- code is at the beginning of a line or in the middle. The state scanner
--- organizes the code in such a way that every string categorized as
--- "Code" <b>always</b> starts in the middle of a line.
scan :: [Char] -> Tokens
scan s = modHead id (stateScan 1 (Code x) x s) where x free
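--- Illustrative example (an assumption for documentation purposes, not
--- taken from the original sources): scanning a module that starts with an
--- import line, e.g. "import List\n\nf = \"hi\" -- greet\n", is expected to
--- yield a ModuleHead token for the import section followed by Code, Text,
--- and SmallComment tokens for the definition of f.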
stateScan :: Int -> Token -> [Char] -> [Char] -> Tokens
stateScan _ token x "" | x=:="" = [token]
stateScan _ (Code co) x [c] | x=:=[c] = maybeCode co []
stateScan _ (Text t) x [c]
  | x=:="" = if c=='\"' then [Text t]
                        else error "File ended while scanning string."
stateScan _ (BigComment _) x [_]
  | x=:="" = error "File ended while expecting -}"
stateScan _ (Meta _) x [_]
  | x=:="" = error "File ended while expecting +}"
stateScan line (Code co) x (c:c':cs)
  | c=='\"' = (x=:="") &>
              (maybeCode co (stateScan line (Text y) y (c':cs)))
  | c=='-' && c'=='-'
  = let (comment,rest) = span (/='\n') cs
    in (x=:="") &> maybeCode co
                     (SmallComment comment :
                        (stateScan line (Code y) y rest))
  | c=='{' && c'=='-'
  = (x=:="") &> maybeCode co (stateScan line (BigComment y) y cs)
  | c=='{' && c'=='+'
  = (x=:="") &> maybeCode co (stateScan line (Meta y) y cs)
  | c'=='\'' && elem c (infixIDs++delimiters)
  = (x=:=[c]) &> maybeCode co $
      case cs of
        '\\':l:'\'':rest -> Letter ['\\',l] : (stateScan line (Code y) y rest)
        '\\':a:b:d:'\'':rest ->
          if all isDigit [a,b,d]
            then Letter ['\\',a,b,d] : (stateScan line (Code y) y rest)
            else error $ "Improperly terminated character found in line "
                         ++ show line
        l:'\'':rest -> Letter [l] : (stateScan line (Code y) y rest)
        _ -> error $ "Improperly terminated character in line "++show line
  | c=='\n'
  = (x=:=c:y) &> stateScan (line+1) (Code co) y (c':cs)
  | otherwise
  = (x=:=c:y) &> stateScan line (Code co) y (c':cs)
 where
  y free
stateScan line (BigComment co) x (c:c':cs)
  | c=='-' && c'=='}'
  = (x=:="") &> BigComment co : (stateScan line (Code y) y cs)
  | c=='\n'
  = (x=:=c:y) &> stateScan (line+1) (BigComment co) y (c':cs)
  | otherwise
  = (x=:=c:y) &> stateScan line (BigComment co) y (c':cs)
 where
  y free
stateScan line (Meta co) x (c:c':cs)
  | c=='+' && c'=='}'
  = (x=:="") &> Meta co : (stateScan line (Code y) y cs)
  | c=='\n'
  = (x=:=c:y) &> stateScan (line+1) (Meta co) y (c':cs)
  | otherwise
  = (x=:=c:y) &> stateScan line (Meta co) y (c':cs)
 where
  y free
stateScan line (Text t) x (c:c':cs)
  | c == '\"' = (x=:="") &> Text t : (stateScan line (Code y) y (c':cs))
  | c == '\\' = (x=:=c:c':y) &> stateScan line (Text t) y cs
  | elem c toBeEscaped = error $ "unescaped "++c:" encountered in line "
                                 ++ show line
  | otherwise = (x=:=c:y) &> stateScan line (Text t) y (c':cs)
 where
  y free
modHead :: ([Char] -> [Char]) -> Tokens -> Tokens
modHead fs (Code c : ts)
  = case break (\x->x=='\n'||x=='\r') c of
      ("","")      -> modHead fs ts
      ("",n:rest)  -> modHead (fs . (n:)) (Code rest : ts)
      (line,"")    -> if any (lineBeginsWith line) headers
                        then ModuleHead (fs line) : (modHeadInLine id ts)
                        else maybeMo (fs "") (Code c:ts)
      (line,n:rest)-> if any (lineBeginsWith line) headers
                        then modHead (fs . (line++) . (n:)) (Code rest:ts)
                        else maybeMo (fs "") (Code c : ts)
modHead fs (BigComment c : ts) = maybeMo (fs "") (BigComment c : (modHead id ts))
modHead fs (SmallComment c : ts) = maybeMo (fs "") (SmallComment c : (modHead id ts))
modHead fs (Meta c : ts) = maybeMo (fs "") (Meta c : ts)
modHead fs [] = maybeMo (fs "") []
modHeadInLine :: ([Char] -> [Char]) -> Tokens -> Tokens
modHeadInLine fs [] = maybeMo (fs "") []
modHeadInLine fs (Code c : ts)
  = case break (\x->x=='\n'||x=='\r') c of
      (line,n:rest) -> modHead (fs . (line++) . (n:)) (Code rest : ts)
      _             -> modHead fs (Code c : ts)
modHeadInLine fs (BigComment c : ts) =
  maybeMo (fs "") (BigComment c : (modHeadInLine id ts))
modHeadInLine fs (SmallComment c : ts) =
  maybeMo (fs "") (SmallComment c : (modHeadInLine id ts))
modHeadInLine fs (Meta c : ts) = maybeMo (fs "") (Meta c : ts)
headers :: [[Char]]
headers = [" ","import","infix","infixl","infixr","module"]
lineBeginsWith :: [Char] -> [Char] -> Bool
lineBeginsWith line s
  | length line < lens = False
  | otherwise          = line==s ||
                         let (s',rest) = splitAt (length s) line
                         in s==s' && (null rest || isSep (head rest))
 where
  lens = length s
isSep :: Char -> Bool
isSep c = isSpace c || elem c infixIDs || elem c "([{"
infixIDs :: String
infixIDs = "~!@#$%^&*+-=<>?./|\\:"
delimiters :: String
delimiters = " ([{,}])"
toBeEscaped :: [Char]
toBeEscaped = "\\\n\r\t\""
maybeCode :: [Char] -> Tokens -> Tokens
maybeCode s ts = {-if s=="" then ts else-} Code s:ts
maybeMo s ts = if s=="" then ts else ModuleHead s : case ts of
                 Code c:ts' -> Code ('\n':c):ts'
                 _          -> ts
--- Yields the program code without comments
--- (but with the line breaks for small comments).
plainCode :: Tokens -> String
plainCode (ModuleHead s:ts) = case ts of
                                Code c : ts' -> s++drop 1 c++plainCode ts'
                                _            -> s++plainCode ts
plainCode (Code s:ts) = s++plainCode ts
plainCode (Text s:ts) = '\"':s++'\"':plainCode ts
plainCode (Letter s:ts) = '\'':s++'\'':plainCode ts
plainCode (BigComment _:ts) = plainCode ts
plainCode (SmallComment _:ts) = plainCode ts
plainCode (Meta s:ts) = "{+"++s++"+}"++plainCode ts
plainCode [] = ""
--- Inverse function of scan, i.e., unscan (scan x) = x.
--- unscan is used to yield a program after changing the list of tokens.
unscan :: Tokens -> String
unscan (ModuleHead s:ts) = s++case ts of
                                (Code (_:c):ts') -> unscan (Code c:ts')
                                _                -> unscan ts
unscan (Code s:ts) = s++unscan ts
unscan (Text s:ts) = '\"':s++'\"':unscan ts
unscan (Letter s:ts) = '\'':s++'\'':unscan ts
unscan (BigComment s:ts) = "{-"++s++"-}"++unscan ts
unscan (SmallComment s:ts) = "--"++s++unscan ts
unscan (Meta s:ts) = "{+"++s++"+}"++unscan ts
unscan [] = ""
--- Returns the scanned tokens for a given file name.
readScan :: [Char] -> IO Tokens
readScan fn = readFile fn >>= return . scan
--- Tests whether `unscan . scan` is the identity on the given file.
testScan :: [Char] -> IO ()
testScan fn = do c <- readFile fn
                 print (unscan (scan c)==c)
testWeave :: [Char] -> IO ()
testWeave fn = do c <- readFile fn
                  print (unscan (weave (unweaveCode (scan c)))==c)
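-- A hedged usage sketch (not part of the original module; the helper name
-- `stripComments` is illustrative only): remove all big and small comments
-- from a Curry source file and print the remaining program text.
stripComments :: String -> IO ()
stripComments fn = do
  ts <- readScan fn
  putStr (unscan (filter (not . isComment) ts))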
# cass-analysis - Base libraries and implementation of program analyses for CASS
This directory contains the implementation of various
program analyses which can be used with CASS
(the Curry Analysis Server System), available in the package `cass`.
{
"name": "cass-analysis",
"version": "0.0.4",
"author": "Michael Hanus <mh@informatik.uni-kiel.de>",
"synopsis": "Libraries with various compile-time analyses for Curry",
"category": [ "Analysis" ],
"dependencies": {
},
"source": {
"git": "https://git.ps.informatik.uni-kiel.de/curry-packages/cass-analysis.git",
"tag": "$version"
}
}
------------------------------------------------------------------------------
--- Demandedness analysis:
--- checks which arguments an operation demands, i.e., for which arguments
--- the result is always bottom whenever that argument is bottom.
---
--- @author Michael Hanus
--- @version May 2013
------------------------------------------------------------------------------
module Analysis.Demandedness
where
import Analysis.Types
import FlatCurry.Types
import FlatCurry.Goodies
import List((\\),intercalate)
------------------------------------------------------------------------------
--- Data type to represent information about demanded arguments.
--- Demanded arguments are represented as a list of indices
--- for the arguments, where arguments are numbered from 1.
type DemandedArgs = [Int]
-- Show demandedness information as a string.
showDemand :: AOutFormat -> DemandedArgs -> String
showDemand AText [] = "no demanded arguments"
showDemand ANote [] = ""
showDemand fmt (x:xs) =
  (if fmt==AText then "demanded arguments: " else "") ++
  intercalate "," (map show (x:xs))
-- Abstract demand domain.
data DemandDomain = Bot | Top
-- Least upper bound on abstract demand domain.
lub :: DemandDomain -> DemandDomain -> DemandDomain
lub Bot x = x
lub Top _ = Top
--- Demandedness analysis.
demandAnalysis :: Analysis DemandedArgs
demandAnalysis = dependencyFuncAnalysis "Demand" [1..] daFunc
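-- Illustrative examples (assumptions for documentation, not taken from the
-- analysis sources): for an operation such as `const x y = x`, only the
-- first argument is needed to compute a result, so the analysis is expected
-- to yield [1]; for `(&&)`, which pattern matches on its first argument
-- only, the expected result is likewise [1].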
-- We define the demanded arguments of some primitive prelude operations.
-- Otherwise, we analyse the right-hand side of the rule.
daFunc :: FuncDecl -> [(QName,DemandedArgs)] -> DemandedArgs
daFunc (Func (m,f) _ _ _ rule) calledFuncs
  | f `elem` prelude2s && m==prelude = [1,2]
  | f `elem` prelude1s && m==prelude = [1]
  | otherwise                        = daFuncRule calledFuncs rule
 where
  prelude2s = ["==","=:=","compare","<=","$#","$##","$!","$!!",
               "+","-","*","div","mod","divMod","quot","rem","quotRem"]
  prelude1s = ["seq","ensureNotFree","apply","cond","=:<=","negateFloat"]
  -- TODO: >>= catch catchFail
daFuncRule :: [(QName,DemandedArgs)] -> Rule -> DemandedArgs
daFuncRule _ (External _) = [] -- nothing known about other externals
daFuncRule calledFuncs (Rule args rhs) =
  map fst
      (filter ((==Bot) . snd)
              (map (\botarg -> (botarg,absEvalExpr rhs [botarg])) args))
 where
  -- abstract evaluation of an expression w.r.t. variables assumed to be Bot
  absEvalExpr (Var i) bvs = if i `elem` bvs then Bot else Top
  absEvalExpr (Lit _) _   = Top
  absEvalExpr (Comb ct g es) bvs =
    if ct == FuncCall
      then maybe (error $ "Abstract value of " ++ show g ++ " not found!")
                 (\gdas ->
                    let curargs = map (\ (i,e) -> (i,absEvalExpr e bvs))
                                      (zip [1..] es)
                        cdas = gdas \\
                                 (map fst (filter ((/=Bot) . snd) curargs))
                    in if null cdas then Top else Bot)
                 (lookup g calledFuncs)
      else Top
  absEvalExpr (Free _ e)    bvs = absEvalExpr e bvs
  absEvalExpr (Let bs e)    bvs = absEvalExpr e (absEvalBindings bs bvs)
  absEvalExpr (Or e1 e2)    bvs = lub (absEvalExpr e1 bvs) (absEvalExpr e2 bvs)
  absEvalExpr (Case _ e bs) bvs =
    if absEvalExpr e bvs == Bot
      then Bot
      else foldr lub Bot (map absEvalBranch bs)
   where absEvalBranch (Branch _ be) = absEvalExpr be bvs
  absEvalExpr (Typed e _) bvs = absEvalExpr e bvs
  -- could be improved with local fixpoint computation
  absEvalBindings [] bvs = bvs
  absEvalBindings ((i,exp) : bs) bvs =
    let ival = absEvalExpr exp bvs
    in if ival==Bot
         then absEvalBindings bs (i:bvs)
         else absEvalBindings bs bvs
prelude :: String
prelude = "Prelude"
------------------------------------------------------------------------------
------------------------------------------------------------------------------
--- Determinism analysis:
--- checks whether an operation is deterministic or non-deterministic, i.e.,
--- whether its evaluation on ground argument terms might follow
--- different computation paths.
---
--- @author Michael Hanus
--- @version August 2016
------------------------------------------------------------------------------
module Analysis.Deterministic
( overlapAnalysis, showOverlap, showDet
, functionalAnalysis, showFunctional
, Deterministic(..),nondetAnalysis
, showNonDetDeps, nondetDepAnalysis, nondetDepAllAnalysis
) where
import Analysis.Types
import Char(isDigit)
import FlatCurry.Types
import FlatCurry.Goodies
import List
import Sort(sort)
------------------------------------------------------------------------------
-- The overlapping analysis can be applied to individual functions.
-- It assigns to a FlatCurry function definition a flag which is True
-- if this function is defined with overlapping left-hand sides.
overlapAnalysis :: Analysis Bool
overlapAnalysis = simpleFuncAnalysis "Overlapping" isOverlappingFunction
isOverlappingFunction :: FuncDecl -> Bool
isOverlappingFunction (Func _ _ _ _ (Rule _ e)) = orInExpr e
isOverlappingFunction (Func f _ _ _ (External _)) = f==("Prelude","?")
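-- Illustrative example (not taken from the original sources): an operation
-- defined by the overlapping rules `coin = 0` and `coin = 1` is typically
-- represented in FlatCurry with an Or expression (or a call to `Prelude.?`),
-- which `orInExpr` detects, so the operation is classified as overlapping.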
-- Check an expression for occurrences of OR:
orInExpr :: Expr -> Bool
orInExpr (Var _) = False
orInExpr (Lit _) = False
orInExpr (Comb _ f es) = f==(pre "?") || any orInExpr es
orInExpr (Free _ e) = orInExpr e
orInExpr (Let bs e) = any orInExpr (map snd bs) || orInExpr e
orInExpr (Or _ _) = True
orInExpr (Case _ e bs) = orInExpr e || any orInBranch bs
 where orInBranch (Branch _ be) = orInExpr be
orInExpr (Typed e _) = orInExpr e
-- Show overlapping information as a string.
showOverlap :: AOutFormat -> Bool -> String
showOverlap _ True = "overlapping"
showOverlap AText False = "non-overlapping"
showOverlap ANote False = ""
------------------------------------------------------------------------------
-- The functional analysis is a global function dependency analysis.
-- It assigns to a FlatCurry function definition a flag which is True
-- if this function is purely functionally defined, i.e., its definition
-- does not depend on any operation containing overlapping rules or free variables.
functionalAnalysis :: Analysis Bool
functionalAnalysis = dependencyFuncAnalysis "Functional" True isFuncDefined
-- Show functionally defined information as a string.
showFunctional :: AOutFormat -> Bool -> String
showFunctional _ True = "functional"
showFunctional AText False = "defined with logic features"
showFunctional ANote False = ""
-- An operation is functionally defined if its definition is not
-- non-deterministic (no overlapping rules, no extra variables) and
-- it depends only on functionally defined operations.
isFuncDefined :: FuncDecl -> [(QName,Bool)] -> Bool
isFuncDefined func calledFuncs =
  not (isNondetDefined func) && and (map snd calledFuncs)
-- Is a function f defined to be potentially non-deterministic, i.e.,
-- is the rule non-deterministic or does it contain extra variables?
isNondetDefined :: FuncDecl -> Bool
isNondetDefined (Func f _ _ _ rule) =
  f `notElem` (map pre ["failed","$!!","$##","normalForm","groundNormalForm"])
  -- these operations are internally defined in PAKCS with extra variables
  && isNondetRule rule
 where
  isNondetRule (Rule _ e) = orInExpr e || extraVarInExpr e
  isNondetRule (External _) = f==("Prelude","?")
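-- Illustrative example (not taken from the original sources): an operation
-- like `last xs | ys ++ [x] =:= xs = x where x, ys free` introduces extra
-- (free) variables in its rule, so it is classified as potentially
-- non-deterministic, whereas an operation such as `length` is not.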
-- check an expression for occurrences of extra variables:
extraVarInExpr :: Expr -> Bool
extraVarInExpr (Var _) = False
extraVarInExpr (Lit _) = False
extraVarInExpr (Comb _ _ es) = or (map extraVarInExpr es)
extraVarInExpr (Free vars e) = (not (null vars)) || extraVarInExpr e
extraVarInExpr (Let bs e) = any extraVarInExpr (map snd bs) || extraVarInExpr e
extraVarInExpr (Or e1 e2) = extraVarInExpr e1 || extraVarInExpr e2