@misc{Kiwiel_Krzysztof_Convergence_2005,
  author       = {Kiwiel, Krzysztof},
  title        = {Convergence of the Gradient Sampling Algorithm for Nonsmooth Nonconvex Optimization},
  journal      = {Raport Badawczy = Research Report},
  publisher    = {Systems Research Institute, Polish Academy of Sciences (Instytut Badań Systemowych, Polska Akademia Nauk)},
  address      = {Warszawa},
  year         = {2005},
  howpublished = {online},
  type         = {Text},
  language     = {eng},
  copyright    = {Creative Commons Attribution BY 4.0 license},
  URL          = {http://rcin.org.pl/Content/139664/PDF/RB-2005-48.pdf},
  keywords     = {Nonsmooth optimization, Gradient sampling, Generalized gradient, Nonconvex, Subgradient},
  abstract     = {The paper deals with the gradient sampling algorithm of Burke, Lewis and Overton for minimizing a locally Lipschitz function $f$ on $\mathbb{R}^n$ that is continuously differentiable on an open dense subset. The authors strengthen the existing convergence results for this algorithm and introduce a slightly revised version for which stronger results are established without requiring compactness of the level sets of $f$. In particular, it is shown that with probability 1 the revised algorithm either drives the $f$-values to $-\infty$, or each of its cluster points is Clarke stationary for $f$. A simplified variant is also considered in which the differentiability check is skipped and the user can control the number of $f$-evaluations per iteration.},
}