<?xml version="1.0" encoding="UTF-8"?>
<rss version="2.0"
	xmlns:content="http://purl.org/rss/1.0/modules/content/"
	xmlns:wfw="http://wellformedweb.org/CommentAPI/"
	xmlns:dc="http://purl.org/dc/elements/1.1/"
	xmlns:atom="http://www.w3.org/2005/Atom"
	xmlns:sy="http://purl.org/rss/1.0/modules/syndication/"
	xmlns:slash="http://purl.org/rss/1.0/modules/slash/"
	>

<channel>
	<title>UgoTrade &#187; the internet of things</title>
	<atom:link href="https://www.ugotrade.com/tag/the-internet-of-things/feed/" rel="self" type="application/rss+xml" />
	<link>https://www.ugotrade.com</link>
	<description>Augmented Realities at the Edge of the Network</description>
	<lastBuildDate>Wed, 25 May 2016 15:59:56 +0000</lastBuildDate>
	<language>en-US</language>
		<sy:updatePeriod>hourly</sy:updatePeriod>
		<sy:updateFrequency>1</sy:updateFrequency>
	<generator>https://wordpress.org/?v=3.9.40</generator>
	<item>
		<title>The Next Wave of AR: Mobile Social Interaction Right Here, Right Now!</title>
		<link>https://www.ugotrade.com/2009/11/19/the-next-wave-of-ar-mobile-social-interaction-right-here-right-now/</link>
		<comments>https://www.ugotrade.com/2009/11/19/the-next-wave-of-ar-mobile-social-interaction-right-here-right-now/#comments</comments>
		<pubDate>Fri, 20 Nov 2009 04:53:07 +0000</pubDate>
		<dc:creator><![CDATA[Tish Shute]]></dc:creator>
				<category><![CDATA[Ambient Devices]]></category>
		<category><![CDATA[Ambient Displays]]></category>
		<category><![CDATA[architecture of participation]]></category>
		<category><![CDATA[Artificial general Intelligence]]></category>
		<category><![CDATA[Artificial Intelligence]]></category>
		<category><![CDATA[Augmented Reality]]></category>
		<category><![CDATA[digital public space]]></category>
		<category><![CDATA[Ecological Intelligence]]></category>
		<category><![CDATA[Instrumenting the World]]></category>
		<category><![CDATA[internet of things]]></category>
		<category><![CDATA[message brokers and sensors]]></category>
		<category><![CDATA[Mixed Reality]]></category>
		<category><![CDATA[mobile augmented reality]]></category>
		<category><![CDATA[mobile meets social]]></category>
		<category><![CDATA[Mobile Reality]]></category>
		<category><![CDATA[Mobile Technology]]></category>
		<category><![CDATA[new urbanism]]></category>
		<category><![CDATA[online privacy]]></category>
		<category><![CDATA[open source]]></category>
		<category><![CDATA[Participatory Culture]]></category>
		<category><![CDATA[privacy and online identity]]></category>
		<category><![CDATA[smart appliances]]></category>
		<category><![CDATA[Smart Devices]]></category>
		<category><![CDATA[Smart Planet]]></category>
		<category><![CDATA[sustainable living]]></category>
		<category><![CDATA[sustainable mobility]]></category>
		<category><![CDATA[ubiquitous computing]]></category>
		<category><![CDATA[Web Meets World]]></category>
		<category><![CDATA[websquared]]></category>
		<category><![CDATA[World 2.0]]></category>
		<category><![CDATA[AR browsers]]></category>
		<category><![CDATA[AR Dev camp]]></category>
		<category><![CDATA[AR Wave]]></category>
		<category><![CDATA[calo]]></category>
		<category><![CDATA[mobile social]]></category>
		<category><![CDATA[mobile social interaction utility]]></category>
		<category><![CDATA[open distributed augmented reality]]></category>
		<category><![CDATA[pygowave]]></category>
		<category><![CDATA[real time internet]]></category>
		<category><![CDATA[siri]]></category>
		<category><![CDATA[smart things]]></category>
		<category><![CDATA[social augmented experiences]]></category>
		<category><![CDATA[social augmented reality]]></category>
		<category><![CDATA[The Copenhagen Wheel]]></category>
		<category><![CDATA[the internet of things]]></category>
		<category><![CDATA[the outernet]]></category>
		<category><![CDATA[the sentient city]]></category>
		<category><![CDATA[Wave Federation Protocol]]></category>
		<category><![CDATA[Web Squared]]></category>

		<guid isPermaLink="false">http://www.ugotrade.com/?p=4869</guid>
		<description><![CDATA[The Next Wave of AR: Mobile Social Interaction, Right Here, Right Now! View more presentations from Tish Shute. Click on the image below or here to watch this presentation and others from Momo13]]></description>
				<content:encoded><![CDATA[<div id="__ss_2542526" style="width: 425px; text-align: left;"><a style="font:14px Helvetica,Arial,Sans-serif;display:block;margin:12px 0 3px 0;text-decoration:underline;" title="The Next Wave of AR: Mobile Social Interaction, Right Here, Right Now!" href="http://www.slideshare.net/TishShute/the-next-wave-of-ar-mobile-social-interaction-right-here-right-now-2542526">The Next Wave of AR: Mobile Social Interaction, Right Here, Right Now!</a><object style="margin:0px" classid="clsid:d27cdb6e-ae6d-11cf-96b8-444553540000" width="425" height="355" codebase="http://download.macromedia.com/pub/shockwave/cabs/flash/swflash.cab#version=6,0,40,0"><param name="allowFullScreen" value="true" /><param name="allowScriptAccess" value="always" /><param name="src" value="http://static.slidesharecdn.com/swf/ssplayer2.swf?doc=thenextwaveofar2-091120000046-phpapp01&amp;stripped_title=the-next-wave-of-ar-mobile-social-interaction-right-here-right-now-2542526" /><param name="allowfullscreen" value="true" /><embed style="margin:0px" type="application/x-shockwave-flash" width="425" height="355" src="http://static.slidesharecdn.com/swf/ssplayer2.swf?doc=thenextwaveofar2-091120000046-phpapp01&amp;stripped_title=the-next-wave-of-ar-mobile-social-interaction-right-here-right-now-2542526" allowscriptaccess="always" allowfullscreen="true"></embed></object>
<div style="font-size: 11px; font-family: tahoma,arial; height: 26px; padding-top: 2px;">View more <a style="text-decoration:underline;" href="http://www.slideshare.net/">presentations</a> from <a style="text-decoration:underline;" href="http://www.slideshare.net/TishShute">Tish Shute</a>.</div></div>
<p>Click on the image below or <a href="http://www.mobilemonday.nl/talks/tish-shute-the-next-wave-of-ar/" target="_blank">here to watch</a> this presentation and others from <a href="http://www.mobilemonday.nl/">Momo13</a></p>
<p><a href="http://www.mobilemonday.nl/talks/tish-shute-the-next-wave-of-ar/" target="_blank"><img class="alignnone size-medium wp-image-4876" title="Screen shot 2009-11-20 at 1.32.24 PM" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/11/Screen-shot-2009-11-20-at-1.32.24-PM-300x167.png" alt="Screen shot 2009-11-20 at 1.32.24 PM" width="300" height="167" /></a></p>
]]></content:encoded>
			<wfw:commentRss>https://www.ugotrade.com/2009/11/19/the-next-wave-of-ar-mobile-social-interaction-right-here-right-now/feed/</wfw:commentRss>
		<slash:comments>4</slash:comments>
		</item>
		<item>
		<title>Total Immersion and the &#8220;Transfigured City:&#8221; Shared Augmented Realities, the &#8220;Web Squared Era,&#8221; and Google Wave</title>
		<link>https://www.ugotrade.com/2009/09/26/total-immersion-and-the-transfigured-city-shared-augmented-realities-the-web-squared-era-and-google-wave/</link>
		<comments>https://www.ugotrade.com/2009/09/26/total-immersion-and-the-transfigured-city-shared-augmented-realities-the-web-squared-era-and-google-wave/#comments</comments>
		<pubDate>Sun, 27 Sep 2009 04:42:42 +0000</pubDate>
		<dc:creator><![CDATA[Tish Shute]]></dc:creator>
				<category><![CDATA[Ambient Devices]]></category>
		<category><![CDATA[Ambient Displays]]></category>
		<category><![CDATA[Android]]></category>
		<category><![CDATA[Augmented Reality]]></category>
		<category><![CDATA[digital public space]]></category>
		<category><![CDATA[Instrumenting the World]]></category>
		<category><![CDATA[internet of things]]></category>
		<category><![CDATA[iphone]]></category>
		<category><![CDATA[mirror worlds]]></category>
		<category><![CDATA[Mixed Reality]]></category>
		<category><![CDATA[mobile augmented reality]]></category>
		<category><![CDATA[mobile meets social]]></category>
		<category><![CDATA[Mobile Reality]]></category>
		<category><![CDATA[new urbanism]]></category>
		<category><![CDATA[sustainable living]]></category>
		<category><![CDATA[sustainable mobility]]></category>
		<category><![CDATA[ubiquitous computing]]></category>
		<category><![CDATA[Web Meets World]]></category>
		<category><![CDATA[websquared]]></category>
		<category><![CDATA[3D Interactive Live Show]]></category>
		<category><![CDATA[Acrossair]]></category>
		<category><![CDATA[AMEE]]></category>
		<category><![CDATA[Amphibious Architecture]]></category>
		<category><![CDATA[anime]]></category>
		<category><![CDATA[Apple iPhone]]></category>
		<category><![CDATA[AR baseball cards for Topps]]></category>
		<category><![CDATA[AR Consortium]]></category>
		<category><![CDATA[AR eyewear]]></category>
		<category><![CDATA[AR goggles]]></category>
		<category><![CDATA[Architectural League of New York]]></category>
		<category><![CDATA[ARML]]></category>
		<category><![CDATA[ARN]]></category>
		<category><![CDATA[Augmented City]]></category>
		<category><![CDATA[augmented city lab]]></category>
		<category><![CDATA[augmented reality books]]></category>
		<category><![CDATA[augmented reality entrepreneurship]]></category>
		<category><![CDATA[augmented reality goggles]]></category>
		<category><![CDATA[augmented reality making visible the invisible]]></category>
		<category><![CDATA[augmented reality mark-up language]]></category>
		<category><![CDATA[augmented reality pollution meter]]></category>
		<category><![CDATA[augmented reality standards]]></category>
		<category><![CDATA[augmented reality toys]]></category>
		<category><![CDATA[augmented virtuality]]></category>
		<category><![CDATA[Bionic Eye]]></category>
		<category><![CDATA[Blair Macintyre]]></category>
		<category><![CDATA[Bruce Sterling]]></category>
		<category><![CDATA[Bruno Uzzan]]></category>
		<category><![CDATA[Conflux]]></category>
		<category><![CDATA[cross platform compatibility for augmented reality]]></category>
		<category><![CDATA[D'Fusion]]></category>
		<category><![CDATA[Daniel Wagner]]></category>
		<category><![CDATA[Denno Coil]]></category>
		<category><![CDATA[distributed]]></category>
		<category><![CDATA[elements of networked urbanism]]></category>
		<category><![CDATA[Elizabeth Goodman]]></category>
		<category><![CDATA[everyware]]></category>
		<category><![CDATA[Fish 'n Microchips]]></category>
		<category><![CDATA[Flickr]]></category>
		<category><![CDATA[Gavin Starks]]></category>
		<category><![CDATA[Gene Becker]]></category>
		<category><![CDATA[geo spatial web]]></category>
		<category><![CDATA[geoAR]]></category>
		<category><![CDATA[geoaugmentation]]></category>
		<category><![CDATA[Google Wave]]></category>
		<category><![CDATA[Google Wave Protocol]]></category>
		<category><![CDATA[Gov 2.0 Expo Showcase]]></category>
		<category><![CDATA[Gov 2.0 Summit]]></category>
		<category><![CDATA[Graz University of Technology]]></category>
		<category><![CDATA[Imagination]]></category>
		<category><![CDATA[Incheon Free Economic Zone]]></category>
		<category><![CDATA[information shadows]]></category>
		<category><![CDATA[Int13]]></category>
		<category><![CDATA[Interaction Design for Augmented Reality]]></category>
		<category><![CDATA[ISMAR 2009]]></category>
		<category><![CDATA[Jeremy Hight]]></category>
		<category><![CDATA[Joe Lamantia]]></category>
		<category><![CDATA[Jonathan Laventhol]]></category>
		<category><![CDATA[Korea's u-Cities]]></category>
		<category><![CDATA[Layar]]></category>
		<category><![CDATA[Layar 3D]]></category>
		<category><![CDATA[magic lens augmented reality]]></category>
		<category><![CDATA[manga]]></category>
		<category><![CDATA[Mark Shepard]]></category>
		<category><![CDATA[Mark Weiser]]></category>
		<category><![CDATA[markerless mobile augmented reality]]></category>
		<category><![CDATA[Metaio]]></category>
		<category><![CDATA[Microsoft Bing]]></category>
		<category><![CDATA[Mike Kuniavsky]]></category>
		<category><![CDATA[Mobilizy]]></category>
		<category><![CDATA[multiuser augmented reality]]></category>
		<category><![CDATA[Natalie Jeremijenko]]></category>
		<category><![CDATA[Natural Fuse]]></category>
		<category><![CDATA[near-field object recognition and tracking]]></category>
		<category><![CDATA[Networked City]]></category>
		<category><![CDATA[networked urbanism]]></category>
		<category><![CDATA[newer urbanism]]></category>
		<category><![CDATA[open]]></category>
		<category><![CDATA[open augmented reality framework]]></category>
		<category><![CDATA[open augmented reality network]]></category>
		<category><![CDATA[Orange Cone]]></category>
		<category><![CDATA[Ori Inbar]]></category>
		<category><![CDATA[Pachube]]></category>
		<category><![CDATA[realtime panorama mapping on mobile phones]]></category>
		<category><![CDATA[RobotVision]]></category>
		<category><![CDATA[sensor networks]]></category>
		<category><![CDATA[Sentient City Survival Kit]]></category>
		<category><![CDATA[Shangri La]]></category>
		<category><![CDATA[shared augmented realities]]></category>
		<category><![CDATA[Sky Writer]]></category>
		<category><![CDATA[Steven Feiner]]></category>
		<category><![CDATA[symbiosis between augmented reality and brands]]></category>
		<category><![CDATA[the internet of things]]></category>
		<category><![CDATA[the LAN of things]]></category>
		<category><![CDATA[the shape of alpha]]></category>
		<category><![CDATA[the web squared era]]></category>
		<category><![CDATA[ThingM]]></category>
		<category><![CDATA[things as services]]></category>
		<category><![CDATA[Thomas Wrobel]]></category>
		<category><![CDATA[Tim O'Reilly]]></category>
		<category><![CDATA[Tod E. Kurt]]></category>
		<category><![CDATA[Total Immersion]]></category>
		<category><![CDATA[Toward the Sentient City]]></category>
		<category><![CDATA[Transfigured City]]></category>
		<category><![CDATA[twitter]]></category>
		<category><![CDATA[u-City]]></category>
		<category><![CDATA[ubiquitous computing and augmented reality]]></category>
		<category><![CDATA[uCity]]></category>
		<category><![CDATA[Usman Haque]]></category>
		<category><![CDATA[Wave Federation Protocol]]></category>
		<category><![CDATA[Weisarian Ubiquitous Computing]]></category>
		<category><![CDATA[Wikitude]]></category>
		<category><![CDATA[xClinic]]></category>
		<category><![CDATA[XMPP versus HTTP]]></category>
		<category><![CDATA[Yochai Benkler]]></category>

		<guid isPermaLink="false">http://www.ugotrade.com/?p=4439</guid>
		<description><![CDATA[Above is an image from Total Immersion&#8217;s augmented reality experience developed for the &#8220;Networked City&#8221; exhibition in South Korea, &#8211; &#8220;a fun scenario created for a u-City&#8217;s infrastructure and city management service&#8221; &#8220;To the naked eye, the exhibit looks like a bare bones model of a city. But when visitors put on the special [&#8230;]]]></description>
				<content:encoded><![CDATA[<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/09/dhj5mk2g_338cwpzntgp_b.jpg"><img class="alignnone size-medium wp-image-4440" title="dhj5mk2g_338cwpzntgp_b" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/09/dhj5mk2g_338cwpzntgp_b-300x170.jpg" alt="dhj5mk2g_338cwpzntgp_b" width="300" height="170" /></a></p>
<p><em>Above is an image from <a href="http://www.t-immersion.com/" target="_blank">Total Immersion&#8217;s</a> augmented reality experience developed for the <a id="winm" title="&quot;Networked City&quot; exhibition in South Korea, &quot;" href="http://www.tomorrowcity.or.kr/sv_web/en_US/space.SpaceInfo.web?targetMethod=DoUe04Sub1" target="_blank">&#8220;Networked City&#8221; exhibition in South Korea,</a> &#8211; &#8220;a fun scenario created for a<a href="http://www.koreaittimes.com/story/4371/leading-global-u-city" target="_blank"> u-City&#8217;s</a> infrastructure and city management service&#8221; </em></p>
<p><strong>&#8220;To the naked eye, the exhibit looks like a bare bones model of a city. But when visitors put on the special AR goggles a whole new world unfolds &#8211; as graphics overlaid on the city model.</strong><em><strong>&#8221; </strong>(<a href="http://gamesalfresco.com/2009/09/14/total-immersion-brings-augmented-reality-to-tomorowcity-todaytomorrow/" target="_blank">Games Alfresco)</a></em></p>
<p>&#8220;The Networked City,&#8221; is a large scale augmented virtuality of a scenario for a networked city. But my guess, reading the <em><a href="http://www.koreaittimes.com/story/4371/leading-global-u-city" target="_blank">Korea IT Times</a></em>, is the plan is to move from an augmented virtuality to an augmented reality as Incheon Free Economic Zone (IFEZ) realizes its vision to become a leading u-City &#8211; where reality is turned &#8220;inside out&#8221; (see <a id="x:2w" title="Inside Out Reality" href="http://www.uxmatters.com/mt/archives/2009/08/inside-out-interaction-design-for-augmented-reality.php">Inside Out: Interaction Design for Augmented Reality)</a>. If you are not familiar with South Korea&#8217;s u-Cities, <a href="http://www.koreaittimes.com/story/4371/leading-global-u-city" target="_blank">check out this post</a> for a short primer (and note<a href="http://www.google.com/trends?q=augmented+reality&amp;ctab=1986817859&amp;geo=all&amp;date=all" target="_blank"> Google Trends search on Augmented Reality </a>shows South Korea leaving everyone else in the dust).</p>
<h3>Ubiquitous computing and augmented reality are like adenine and thymine &#8211; a DNA base pair.</h3>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/09/Screen-shot-2009-09-24-at-11.34.35-PM.png"><img class="alignnone size-medium wp-image-4442" title="Screen shot 2009-09-24 at 11.34.35 PM" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/09/Screen-shot-2009-09-24-at-11.34.35-PM-300x256.png" alt="Screen shot 2009-09-24 at 11.34.35 PM" width="300" height="256" /></a></p>
<p><em>A sky view of Incheon Free Economic Zone (<a href="http://www.koreaittimes.com/story/4371/leading-global-u-city" target="_blank">from Korean IT Times</a>). For more on the IFEZ vision to become a leading u-City <a href="http://www.koreaittimes.com/story/4371/leading-global-u-city" target="_blank">see here</a>.</em></p>
<p><a href="http://www.koreaittimes.com/story/4371/leading-global-u-city" target="_blank">Korea IT Times</a> writes about the u-city concept:</p>
<p><strong>&#8220;Korea began using the term u-City after accepting the concept of ubiquitous computing, a post-desktop model of human-computer interaction created by Mark Weiser, the chief technologist of the Xerox Palo Alto Research Center in California, in 1998. There have been a lot of research in this field since 2002. As a result, many local governments in Korea have applied this concept to various development projectsÂ since 2005Â based on a practical approach to it.&#8221;</strong></p>
<p>The back story to many of my recent posts, including this one, is an understanding of a relationship between ubiquitous computing and augmented reality that emerged, for me, in a February conversation with Adam Greenfield, <a title="Permanent Link to Towards a Newer Urbanism: Talking Cities, Networks, and Publics with Adam Greenfield" rel="bookmark" href="../../2009/02/27/towards-a-newer-urbanism-talking-cities-networks-and-publics-with-adam-greenfield/">Towards a Newer Urbanism: Talking Cities, Networks, and Publics with Adam Greenfield</a>. In case you missed it, here is the link again because I think it holds up very well considering the rapid developments of recent months. Also, importantly for this post, it includes a discussion of moving on from Weiserian visions.</p>
<p><a href="http://speedbird.wordpress.com/" target="_blank">Adam Greenfield&#8217;s Speedbird</a> is one of my key sources for understanding &#8220;networked urbanism,&#8221; and the list he makes of <a href="http://speedbird.wordpress.com/2009/03/22/the-elements-of-networked-urbanism/" target="_blank">the elements of networked urbanism here</a> (also see the comments) &#8211; is my mantra for thinking about the DNA base pair relationship of augmented reality and ubiquitous computing.</p>
<p>Adam Greenfield&#8217;s, <strong>&#8220;summary of what those of us who are thinking, writing and speaking about networked urbanism seem to be seeing&#8221;</strong> is:</p>
<p><strong>1. From <em>latent</em> to <em>explicit</em>; 2. From <em>browse</em> to <em>search</em>; 3. From <em>held</em> to <em>shared</em>; 4. From <em>expiring</em> to <em>persistent</em>; 5. From <em>deferred</em> to <em>real-time</em>; 6. From <em>passive</em> to <em>interactive</em>; 7. From <em>component</em> to <em>resource</em>; 8. From <em>constant</em> to <em>variable</em>; 9. From <em>wayfinding</em> to <em>wayshowing</em>; 10. From <em>object</em> to <em>service</em>; 11. From <em>vehicle</em> to <em>mobility</em>; 12. From <em>community</em> to <em>social network</em>; 13. From <em>ownership</em> to <em>use</em>; 14. From <em>consumer</em> to <em>constituent</em>.</strong></p>
<p></p>
<h3>Augmented Reality &#8211; Making Visible the Invisible</h3>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/09/Screen-shot-2009-09-26-at-2.44.27-PM.png"><img class="alignnone size-medium wp-image-4509" title="Screen shot 2009-09-26 at 2.44.27 PM" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/09/Screen-shot-2009-09-26-at-2.44.27-PM-300x229.png" alt="Screen shot 2009-09-26 at 2.44.27 PM" width="300" height="229" /></a></p>
<p>The screenshot above is one of the coolest &#8220;making visible the invisible&#8221; AR applications. It was developed at Columbia University Graphics and User Interface Lab where <a href="http://www1.cs.columbia.edu/%7Efeiner/" target="_blank">Steven Feiner</a> is Director (see the deep list of projects from the lab <a href="http://graphics.cs.columbia.edu/top.html" target="_blank">here</a>). This app &#8220;shows carbon monoxide levels projected over New York City. The height of each ball reflects concentrations of the pollutant.&#8221; Credit: Sean White and Steven Feiner (<a href="http://www.technologyreview.com/computing/23515/page2/" target="_blank">via Technology Review</a>).</p>
<p>The recent emergence of &#8220;magic lens&#8221; augmented reality apps for our smart phones &#8211; <a href="http://www.wikitude.org/" target="_blank">Wikitude</a>, <a href="http://layar.com/" target="_blank">Layar,</a> <a href="http://www.acrossair.com/" target="_blank">Acrossair</a>, <a href="http://support.sekaicamera.com/">Sekai Camera</a>, and many others now, have given us a new window into our cities. But we are yet to realize the full potential of the AR/ubicomp base pair that can &#8220;make visible the invisible&#8221; and give us new opportunities to relate to the invisible data ecosystems of our cities, not merely as a spectator experience,Â  but as an interactive, in context, real time opportunity to reimagine social relations.</p>
<p><a href="http://www.sentientcity.net/exhibit/?p=3" target="_blank">Mark Shepard</a> says in <a href="http://www.sentientcity.net/exhibit/?p=3" target="_blank">his curatorial statement</a> for, <a href="http://www.sentientcity.net/exhibit/" target="_blank">&#8220;Toward the Sentient City:&#8221;</a> (Much more soon on this very significant exhibit which runs from Sept. 17th to Nov. 7th, 2009.)</p>
<p><strong>&#8220;In place of natural weather systems, however, today we find the dataclouds of 21st century urban space increasingly shaping our experience of this city and the choices we make there.&#8221;</strong></p>
<p>Augmented Reality, as Joe Lamantia points out, is becoming the great &#8220;<a id="o0mh" title="ambassador of ubiquitous computing" href="http://www.uxmatters.com/mt/archives/2009/08/inside-out-interaction-design-for-augmented-reality.php">ambassador of ubiquitous computing</a>.&#8221; AR is &#8220;<strong>&#8230;mak[ing] it possible to experience the new world of ubiquitous computing by reifying the digital layer that permeates our inside-out world,&#8221; </strong>and we are only just glimpsing the razor thin end of the wedge in this regard.</p>
<p>I am still working on my <a href="http://www.gov2summit.com/" target="_blank">Gov 2.0 Summit </a>write up and, amongst other things, I will talk about how an emerging new social contract around open data, here in the US, will put augmented reality apps center stage &#8211; &#8220;doing stuff that matters.&#8221; At <a href="http://www.gov2expo.com/gov2expo2009" target="_blank">Gov 2.0 Expo Showcase</a> Tim O&#8217;Reilly tweeted:</p>
<p><a id="i23q" title="Tim O'Reilly" href="http://twitter.com/timoreilly">Tim O&#8217;Reilly</a> Really enjoyed @capttaco (Digital Arch Design) @ #gov20e: &#8220;Augmented Reality could be a new public infrastructure&#8221; <a href="http://bit.ly/18iCx" target="_blank">http://bit.ly/18iCx</a></p>
<p>Also see Tim O&#8217;Reilly and Jennifer Pahlka on Forbes.com discuss the <a href="http://www.forbes.com/2009/09/23/web-squared-oreilly-technology-breakthroughs-web2point0.html" target="_blank">The &#8220;Web Squared&#8221; Era</a> -Â <strong> &#8220;the Web Squared era is an era of augmented reality arriving (like the sensor revolution) stealthily, in more pedestrian clothes than we expected</strong>.<strong>&#8230; &#8230;our world will have &#8220;<a href="http://www.orangecone.com/archives/2009/02/smart_things_an.html" target="_blank">information shadows</a>.&#8221; Augmented reality amounts to information shadows made visible.&#8221;</strong></p>
<p>Again there is back story to how I came to think about Information Shadows in relation to augmented reality. So in case you missed it the first time, here is the link to a conversation that began in a hallway meeting between Tim O&#8217;Reilly, Mike Kuniavsky, <a href="http://thingm.com/" target="_blank">ThingM</a>, Usman Haque, <a href="http://www.pachube.com/" target="_blank">Pachube</a>, and Gavin Starks, <a href="http://www.amee.com/" target="_blank">AMEE</a>, at <a href="http://en.oreilly.com/et2009/" target="_blank">ETech earlier this year</a>, <a title="Permanent Link to Dematerializing the World, Shadows, Subscriptions and Things as Services: Talking With Mike Kuniavsky at ETech 2009" rel="bookmark" href="../../2009/03/18/dematerializing-the-world-shadows-subscriptions-and-things-as-services-talking-with-mike-kuniavsky-at-etech-2009/">&#8220;Dematerializing the World, Shadows, Subscriptions and Things as Services: Talking With Mike Kuniavsky at ETech 2009</a>.&#8221;</p>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/09/Screen-shot-2009-09-26-at-9.32.09-PM.png"><img class="alignnone size-medium wp-image-4547" title="Screen shot 2009-09-26 at 9.32.09 PM" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/09/Screen-shot-2009-09-26-at-9.32.09-PM-300x225.png" alt="Screen shot 2009-09-26 at 9.32.09 PM" width="300" height="225" /></a></p>
<p><a href="http://www.slideshare.net/rlenz/augmented-city-lab-picnic-09" target="_blank">Slide from Augmented City Lab</a> @ <a href="http://www.picnicnetwork.org/" target="_blank">Picnic &#8217;09</a></p>
<h3>So What&#8217;s Next for Mobile Augmented Reality?</h3>
<p><a href="http://www.youtube.com/watch?v=434zw201iN8&amp;feature=player_embedded" target="_blank"><img class="alignnone size-medium wp-image-4513" title="Screen shot 2009-09-26 at 3.45.45 PM" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/09/Screen-shot-2009-09-26-at-3.45.45-PM-300x186.png" alt="Screen shot 2009-09-26 at 3.45.45 PM" width="300" height="186" /></a></p>
<p>These videos from Daniel Wagner&#8217;s team from Graz University of Technology showing <a href="http://www.youtube.com/watch?v=434zw201iN8&amp;feature=player_embedded" target="_blank">Realtime Panorama Mapping and Tracking on Mobile Phones</a> and <a href="http://www.youtube.com/watch?v=W-mJG3peIXA&amp;feature=player_embedded" target="_blank">Creating an Indoor Panorama in Realtime</a>, as Rouli from Games Alfresco points out,Â  indicate that there is a lot in store for us at <a href="http://www.icg.tugraz.at/Members/daniel/MultipleTargetDetectionAndTrackingWithGuaranteedFrameratesOnMobilePhones/inproceedings_view">ISMAR09</a>.</p>
<p>We may not be so impressed by directory style/&#8221;post it&#8221; AR anymore, as these applications have become commonplace so quickly! But while these early mobile AR apps may be disappointing in relation to some futurist visions of AR &#8211; merely AR/ubicomp appetizers, there are still good implementations of this model coming out (see newcomers to the app store<a id="tzvf" title="Bionic Eye" href="http://mashable.com/2009/09/24/bionic-eye/" target="_blank"> Bionic Eye</a> and <a href="http://www.readwriteweb.com/archives/robotvision_a_bing-powered_iphone_augmented_realit.php" target="_blank">RobotVision</a>). And <a href="http://layar.com/" target="_blank">Layar,</a> always on the ball, has upped the ante for the new cohort of AR Browsers with <a href="http://layar.com/3d/" target="_blank">Layar 3D</a>.</p>
<p>But as Bruce Sterling <a href="http://www.wired.com/beyond_the_beyond/2009/09/augmented-reality-robotvision/" target="_blank">notes here</a>:</p>
<p><strong>*In AR, everybody wants to be the platform and the browser, and nobody wants to be the boring old geolocative database. Look how Tim [creator of RobotVision] here, who is like one guy working on his weekends, can boldly fold-in the multi-billion dollar, multi-million user empires of Apple iPhone, Microsoft Bing, Flickr, and Twitter, all under his right thumb</strong></p>
<p> (watch <a id="qxek" title="video here" href="http://www.youtube.com/watch?v=hWC9gax7SCA&amp;feature=player_embedded">video here</a>)</p>
<p>But if you are looking for something more from AR, you probably won&#8217;t have to wait too long. The two pioneering companies in AR, <a href="http://www.t-immersion.com/" target="_blank">Total Immersion</a> &#8211; founded in 1999, and <a href="http://www.metaio.com/" target="_blank">Metaio</a> &#8211; founded in 2003 are both coming out with &#8220;mobile augmented reality platforms&#8221; in a matter of weeks (see press releases <a href="http://augmented-reality-news.com/2009/09/14/bringing-its-augmented-reality-to-mobile-applications-total-immersion-partners-with-smartphones-app-provider-int13/" target="_blank">here</a> and <a href="http://gamesalfresco.com/2009/09/18/metaio-announcing-mobile-augmented-reality-platform-junaio/" target="_blank">here</a>). And both companies, it seems, will deploy much more sophisticated AR rendering and tracking than we have seen to date.</p>
<p>I approached Bruno Uzzan, founder and CEO of Total Immersion, for an interview as part of my look at the new industry of augmented reality through the eyes of the founding members of the <a href="http://www.arconsortium.org/" target="_blank">AR Consortium</a>. These consortium members are some of the first commercial augmented reality companies.</p>
<p><a href="#jumpto">The interview below</a> with Bruno began early this summer and then we both went on vacation and it picks up after the announcement of the <a href="http://www.int13.net/blog/en/" target="_blank">partnership between Total Immersion and Int13</a>.</p>
<p>The significance of this announcement is that Total Immersion is now positioned to take the augmented reality experiences they have developed for a number of top brands onto multiple mobile platforms with, &#8220;<strong>Int13&#8242;s very clever embedded solution that allows our [Total Immersion's] solutions to work across many [mobile] platforms,&#8221; </strong>while Int13 gets to extend their reach.</p>
<p>Total Immersion has a 50 person R&amp;D team and their two main focuses have been, firstly getting:<strong> </strong></p>
<p><strong>&#8220;Augmented Reality to work with as many platforms as possible &#8211; PC, Mac, Mobile, Game Consoles, all those are the platforms that we are targeting. We are currently doing lot of work in the R &amp; D team in cross platform compatibility&#8230;.&#8221;</strong></p>
<p>and, secondly:</p>
<p><strong>&#8220;Our R&amp;D guys are working on the real world interacting more with the virtual world.Â  And I have started seeing some results which are pretty much crazy and this will be ready for next year.&#8221;</strong></p>
<h3>Pandora&#8217;s Box &#8211; Shared Augmented Realities</h3>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/09/Screen-shot-2009-09-25-at-1.18.15-AM.png"><img class="alignnone size-medium wp-image-4450" title="Screen shot 2009-09-25 at 1.18.15 AM" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/09/Screen-shot-2009-09-25-at-1.18.15-AM-186x300.png" alt="Screen shot 2009-09-25 at 1.18.15 AM" width="186" height="300" /></a></p>
<p>Spes or &#8220;Hope&#8221;; <a title="Engraving" href="http://en.wikipedia.org/wiki/Engraving">engraving</a> by <a title="Sebald Beham" href="http://en.wikipedia.org/wiki/Sebald_Beham">Sebald Beham</a>, German c1540 (see <a href="http://en.wikipedia.org/wiki/Pandora%27s_box" target="_blank">Wikipedia article on Pandora&#8217;s Box</a>)</p>
<p>There are many weaknesses to the mobile smart phone AR experiences we have now, and the lack of near field object recognition (to date), and difficulties with accurate positioning aren&#8217;t the only ones. Note re solving positioning problems in mobile AR, we are yet to see AR leverage public libraries for analyzing scenes like Flickr&#8217;s geo tagged photos; see Aaron Straup Cope&#8217;s work on <a href="http://code.flickr.com/blog/2008/10/30/the-shape-of-alpha/" target="_blank">&#8220;The Shape of Alpha.&#8221;</a> And for more on this, see <a href="http://www.ugotrade.com/2009/06/02/location-becomes-oxygen-at-where-20-wherecamp/" target="_blank">my post here</a>.</p>
<p>But, as Joe Lamantia points out:</p>
<p><strong>&#8220;One of the weakest aspects of the existing interaction patterns for augmented reality is their reliance on single-person, socially disconnected user experiences.&#8221;</strong></p>
<p>In my view, <strong>The Pandora&#8217;s Box of Augmented Realities</strong> is an open, distributed, multiuser augmented reality framework, fully integrated with the internet and world wide web.</p>
<p>As Yochai Benkler has pointed out many times, and argues again in <a href="http://publius.cc/capital_power_and_next_step_decentralization" target="_blank">Capital, Power, and the Next Step in Decentralization</a>, it is &#8220;open, collaborative, distributed practices that have been at the core of what made the Internet.&#8221; We have to try to make sure that open, collaborative, distributed practices are at the core of mobile augmented reality.</p>
<h3>Can Google Wave be the basis for an Open, Distributed, Multiuser Augmented Reality Framework?</h3>
<p><a href="http://www.lostagain.nl/tempspace/PrototypeDiagram.html" target="_blank"><img class="alignnone size-medium wp-image-4492" title="Screen shot 2009-09-25 at 11.51.20 PM" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/09/Screen-shot-2009-09-25-at-11.51.20-PM-300x141.png" alt="Screen shot 2009-09-25 at 11.51.20 PM" width="300" height="141" /></a></p>
<p>I have been exploring the idea of using <a href="http://wave.google.com/" target="_blank">Google Wave </a>protocol as the basis for a distributed, multiuser open augmented reality framework with a small group of AR enthusiasts and developers. And I am happy to say the proposal is beginning to get fleshed out a little.Â  New collaborators are welcome both for &#8220;gear heady&#8221; input and use case suggestions (but re the latter, you can&#8217;t just say everything you see in <a href="http://en.wikipedia.org/wiki/Denn%C5%8D_Coil" target="_blank">Denno Coil</a>..!).</p>
<p>This effort started with Thomas Wrobel&#8217;sÂ  proposal for an Open AR Framework prototyped on IRC &#8211; see <a id="s336" title="here" href="../../2009/08/19/everything-everywhere-thomas-wrobels-proposal-for-an-open-augmented-reality-network/">here,</a> and click to enlarge the image above of, <a href="http://www.lostagain.nl/tempspace/PrototypeDiagram.html" target="_blank">&#8220;Sky Writer: Basic Concept for an Open Multi-source AR Framework.&#8221;</a></p>
<p>But recently we began looking at the <a href="http://www.waveprotocol.org/" target="_blank">Wave Federation Protocol</a>. And, if you check out <a id="ogbq" title="this post," href="http://www.jasonkolb.com/weblog/2009/09/why-google-wave-is-the-coolest-thing-since-sliced-bread.html#more" target="_blank">this post,</a> and <a id="c0ep" title="this post" href="http://reuvencohen.sys-con.com/node/980762" target="_blank">this post</a>, you may get a glimpse of why Google Wave protocol might be a good basis for an open, distributed, AR Framework. You will notice, if you study what Google Wave has done with the XMPP protocol, that many of <a href="http://speedbird.wordpress.com/2009/03/22/the-elements-of-networked-urbanism/" target="_blank">the elements of networked urbanism</a> that Adam Greenfield describes resonate strongly with what is being attempted in Wave.</p>
<p>But enough said for now! Regardless of the details of implementation &#8211; Google Wave or an AR protocol built from scratch (phew! the latter does seem like a lot of work) &#8211; an open, distributed, multiuser AR framework integrated with the internet and web would explode the potential of AR, creating new possibilities for data flows, mashups, and shared augmented realities.</p>
<p>And we are excited by Google Wave because, as Thomas puts it:</p>
<p><strong>&#8220;The really great thing wave does &#8230;.(aside from being an open standard backed by a major player&#8230;hopefully leading to thousands of worldwide servers )&#8230;.is that it allows anyone to create any number of waves, set precisely who can view or edit them, and for them to be able to be updated quickly and continuously (and even simultaneously!)</strong><strong> Better yet, changes will (if necessary) propagate to all the other servers sharing that wave. It does all this right now. From my eyes this does a lot of the work of an AR infrastructure already.</strong></p>
<p><strong>I can&#8217;t see any other protocol actually doing anything like this at the moment, although correct me if I&#8217;m wrong, as alternatives are always welcome :)&#8221;</strong></p>
<p>Also, Thomas notes, <strong>&#8220;even the playback system (that is, the ability to playback the changes made to a wave since its creation) &#8230;this could give us automatically some of the ideas Jeremy Hight has mentioned in <a href="http://piim.newschool.edu/journal/issues/2009/01/pdfs/ParsonsJournalForInformationMapping_Hight-Jeremy.pdf" target="_blank">his visionary work here</a>,Â  and <a href="http://piim.newschool.edu/journal/issues/2009/02/pdfs/ParsonsJournalForInformationMapping_Hight-Jeremy.pdf" target="_blank">here</a> on &#8220;the geo spatial web, interlinked locations and data, immersive augmentation and open source geo augmentation.&#8221;</strong></p>
<p>One of the many reasons why an Open, distributed AR Framework would be so cool is it would open up all kinds of possibilities for <span>GeoAR</span> by providing the over-arching standard protocol for communication of updates necessary for the substandards that will facilitate <span>GeoAR</span>.</p>
<p>Also important to note is theÂ  <a id="o0is" title="Wave Federation Protocol docs which are all publicly available here" href="http://www.waveprotocol.org/" target="_blank">Wave Federation Protocol</a> allows anyone:</p>
<p><strong>&#8220;to run wave servers and become wave providers, for themselves, or as services for their users, and to &#8220;federate&#8221; waves, that is, to share waves with each other and with Google Wave. &#8211; &#8220;the federation gateway and a federation proxy and is based on open extension to <a href="http://www.waveprotocol.org/draft-protocol-spec#RFC3920">XMPP core</a> [RFC3920] protocol to allow near real-time communication between two wave servers.&#8221; See Reuven Cohen&#8217;s blog for more <a id="rmr3" title="here" href="http://reuvencohen.sys-con.com/node/980762" target="_blank">here</a> and <a id="mqxr" title="&quot;HTTP is Dead, Long Live the Real Time Cloud.&quot;" href="http://www.elasticvapor.com/2009/05/http-is-dead-long-live-realtime-cloud.html" target="_blank">here, &#8220;HTTP is Dead, Long Live the Real Time Cloud.&#8221;</a></strong></p>
<p>Still some people have expressed concern that an AR Framework using Google Wave protocol would give Google disproportionate influence. Â  Will Google-specific functionality be an issue?Â  How much stuff is Google specific just because no one else is using it (yet)? And how much is Google specific because it holds no value to anyone else but Google? These are some of the questions that have come up.</p>
<p>You are going to see a variety of suggestions for standards and specs for open AR coming out in the next few months, which, as Robert Rice of the <a href="http://www.arconsortium.org/" target="_blank">AR Consortium</a> points out, is: <strong>&#8220;a good thing, we need that competition early on to settle down on best case.&#8221; </strong>Recently, <a href="http://www.mobilizy.com/" target="_blank">Mobilizy</a> have offered up an ARML (&#8220;an augmented reality mark-up language specification based on the OpenGIS&#174; KML Encoding Standard (OGC KML) with extensions&#8221;) for consideration; see <a href="http://www.mobilizy.com/enpress-release-mobilizy-proposes-arml" target="_blank">here.</a></p>
<p>So it is, perhaps, also important to note, that an Open AR Framework should be neutral/transparent to techniques ofÂ  &#8220;reality recognition,&#8221;Â  and methodologies of registration/tracking, allowing various ones to work on the system as new techniques evolve, and to support as many evolving standards as possible.</p>
<p>Augmented Reality developers, like Total Immersion and others with powerful rendering/tracking AR software, should be able to use an Open AR Framework to exchange the data which their tracking will use. And the tracking/rendering problems they and other researchers have solved are much harder than figuring out data exchange on a standard infrastructure or protocol!</p>
<p>So I pricked up my ears when I heard Bruno Uzzan, CEO of <a href="http://www.t-immersion.com/" target="_blank">Total Immersion</a> -Â  the first and currently the largest augmented reality company, with a 50 person R&amp;D team in France and offices in LA, where Bruno himself is now based, say: <strong>&#8220;Total Immersion isÂ  only months away from launching shared mobile augmented reality experiences using near field object recognition/tracking across multiple platforms&#8221;</strong> (for more details read my conversation with Bruno Uzzan <a href="#jumpto">below</a>).</p>
<p>I was happy when I asked Bruno about the possibilities for developing an open, distributed, multiuser augmented reality framework fully integrated with the internet and world wide web (possibly using Google Wave protocols), and he replied:</p>
<p><span id="pnk:" title="Click to view full content"><strong>&#8220;I think this is feasible. I think that&#8217;s doable, that&#8217;s justÂ  in my opinion. I mean some people might have another kind of opinion but I think that that&#8217;s definitely doable.&#8221;</strong></span></p>
<h3>Total Immersion &#8211; working with the &#8220;symbiosis between augmented reality and brands&#8221;</h3>
<p><a href="http://www.youtube.com/watch?v=I7jm-AsY0lU" target="_blank"><img class="alignnone size-medium wp-image-4457" title="dhj5mk2g_344g64g96cq_b" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/09/dhj5mk2g_344g64g96cq_b-300x224.png" alt="dhj5mk2g_344g64g96cq_b" width="300" height="224" /></a></p>
<p>Total Immersion has created many of the best known and most ambitious augmented reality experiences for major brands to date, including Mattel&#8217;s <a title="new toys" href="http://www.readwriteweb.com/archives/mattels_new_web-enabled_avatar_toys_will_offer_augmented_reality.php">new AR toys</a><a title="new toys" href="http://www.readwriteweb.com/archives/mattels_new_web-enabled_avatar_toys_will_offer_augmented_reality.php"><img src="http://www.uxmatters.com/mt/archives/images/new-window-arrow.gif" alt="" width="14" height="12" /></a> to be released in conjunction with the James Cameron film Avatar, and <a id="dmas" title="AR baseball cards for Topps" href="http://www.youtube.com/watch?v=I7jm-AsY0lU">AR baseball cards for Topps</a>, <a href="http://www.youtube.com/watch?v=I7jm-AsY0lU" target="_blank">video here</a> (or click screenshot above), and the <a href="http://www.publishersweekly.com/article/CA6698612.html?industryid=47152" target="_blank">UK&#8217;s first augmented reality books</a>.</p>
<p>Bruno founded Total Immersion 10 years ago when he was just 27. And the kind of conviction it took to survive as an augmented reality business in the decade before augmented reality captured the world&#8217;s attention is remarkable.</p>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/09/dhj5mk2g_343dbsph2fz_b1.png"><img class="alignnone size-medium wp-image-4456" title="dhj5mk2g_343dbsph2fz_b" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/09/dhj5mk2g_343dbsph2fz_b1-300x225.png" alt="dhj5mk2g_343dbsph2fz_b" width="300" height="225" /></a></p>
<p>AR&#8217;s first steps out into the world after 17 years as predominantly a lab science may be &#8220;wobbly&#8221; (what new technology isn&#8217;t), and sometimes gloriously kitsch &#8211; check out <a id="d_eu" title="the riotous video of an AR Live Show Total Immersion produced in Korea here." href="http://www.t-immersion.com/en,video-gallery,36.html" target="_blank">this riotous video of the 3D Interactive Live Show Total Immersion produced in Korea</a> (also see the <a href="http://augmented-reality-news.com/2009/09/15/entertainment-first-interactive-3d-live-show-now-open-in-south-korea/" target="_blank">Total Immersion Augmented Reality Blog</a> for more on TI&#8217;s turnkey Interactive 3D Live Show Solution).</p>
<p>As Lamantia points out <a id="eo6x" title="here" href="http://www.uxmatters.com/mt/archives/2009/08/inside-out-interaction-design-for-augmented-reality.php" target="_blank">here</a>, &#8220;projecting mixed realities into public, common, or social spaces makes them social by default.&#8221;</p>
<p>However, the potential for shared location based augmented reality experiences is as yet untapped.Â  So I see the entry of the most experienced commercial augmented reality company into mobile as pretty interesting.Â Â  WhileÂ  smart phone AR still has significant limitations, and it certainly does differ from some of the futurist dreams of AR (see <a id="x3:y" title="Mok Oh's post hear on his disappointment in this regard" href="http://allthingsv.com/2009/09/03/you-know-what-really-grinds-my-gears-augmented-reality/">Mok Oh&#8217;s post here on his disappointment in this regard)</a>, it is significant that Total Immersion is committing to becoming a leader in mobile AR.</p>
<p>Our smart phones, the powerful networked sensor devices that so many people carry in their pockets, have proved themselves a &#8220;good enough for now&#8221;Â  mediating device for early manifestations of the ubiquitous computing and augmented reality base pair.Â  And now AR and ubicomp is mixed in theÂ  rich, messy soup of everyday life, commerce, business, marketing, art, entertainment, and government, we should get ready to see these technologies grow up fast, and unfold in some surprising ways that lab science didn&#8217;t necessarily predict.</p>
<p>And, perhaps, the new dialogue between scientists and entrepreneurs may spur both communities to outdo themselves.</p>
<p>Particularly, as <a href="http://programmerjoe.com/" target="_blank">Joe Ludwig</a> notes: &#8220;It seems to me that the biggest disconnect between the academics and the entrepreneurs is that they disagree on how far we are from the finish line.&#8221;</p>
<p>See the comments on Ori Inbar&#8217;s post, <a title="Augmented Reality Entrepreneurship: Natural Evolution or Intelligent Design?" rel="bookmark" href="http://gamesalfresco.com/2009/09/22/augmented-reality-entrepreneurship-natural-evolution-or-intelligent-design/">Augmented Reality Entrepreneurship: Natural Evolution or Intelligent Design?</a>, for a courteous but spirited discussion on the potential benefits and frictions of the newly expanded AR community of researchers and entrepreneurs.</p>
<p>As <a href="http://www.cc.gatech.edu/~blair/home.html" target="_blank">Blair MacIntyre </a>(see my long conversation with Blair<a href="http://www.ugotrade.com/2009/06/12/mobile-augmented-reality-and-mirror-worlds-talking-with-blair-macintyre/" target="_blank"> here</a>) notes:</p>
<p><strong>&#8220;not all academics and researchers are only interested in the traditional models of impact. Case in point: I wouldn&#8217;t be building unpublishable games, nor investing so much time talking to the press, entrepreneurs and VCs if I did not believe strongly in the value of the impact I am having by doing that &#8212; and I know others with the same attitude.&#8221;</strong></p>
<p>In this vein, check out the Marble Game (<a href="http://www.youtube.com/watch?v=6AKgH4On65A&amp;feature=player_embedded" target="_blank">video here</a>) developed by Steve Feiner and his team at Columbia U. It&#8217;s enabled by Goblin XNA, an open source AR framework built on top of Microsoft&#8217;s XNA, which powers XBox live games, Zune games, and some Windows games. For more about Goblin XNA and AR from Columbia U <a href="http://graphics.cs.columbia.edu/projects/goblin/index.htm" target="_blank">see here</a>.Â  (Hat tip to <a href="http://www.oreillynet.com/pub/au/125" target="_blank">Brian Jepson</a> for this link)</p>
<p><a href="http://www.youtube.com/watch?v=6AKgH4On65A&amp;feature=player_embedded" target="_blank"><img class="alignnone size-medium wp-image-4528" title="Screen shot 2009-09-26 at 5.16.56 PM" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/09/Screen-shot-2009-09-26-at-5.16.56-PM-300x182.png" alt="Screen shot 2009-09-26 at 5.16.56 PM" width="300" height="182" /></a></p>
<p>While we are still waiting for the kind of sexy AR specs &#8211; nothing totally game changing in <a href="http://gigantico.squarespace.com/336554365346/2009/9/20/eye-for-an-iphone.html" target="_blank">Gigantico&#8217;s AR eyewear roundup</a> (<a href="http://appft1.uspto.gov/netacgi/nph-Parser?Sect1=PTO1&amp;Sect2=HITOFF&amp;d=PG01&amp;p=1&amp;u=%2Fnetahtml%2FPTO%2Fsrchnum.html&amp;r=1&amp;f=G&amp;l=50&amp;s1=%2220080088937%22.PGNR.&amp;OS=DN/20080088937&amp;RS=DN/20080088937" target="_blank">maybe note this Apple patent</a>) &#8211; that might get wide adoption. But at least researchers are not afraid to explore the possibilities of AR Goggles.</p>
<p>But how far are we now, with or without sexy goggles,Â  from a fuller expression of the base pair DNA of ubiquitous computing and augmented reality?</p>
<h3>We may have a LAN of things before we have an Internet of Things</h3>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/09/dhj5mk2g_345g9bxbwd3_b1.jpg"><img class="alignnone size-medium wp-image-4534" title="dhj5mk2g_345g9bxbwd3_b" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/09/dhj5mk2g_345g9bxbwd3_b1-300x199.jpg" alt="dhj5mk2g_345g9bxbwd3_b" width="300" height="199" /></a></p>
<p><em>The picture above is a workshop I attended at <a href="http://confluxfestival.org/2009/about/" target="_blank">Conflux</a> last weekend &#8211; <a href="http://confluxfestival.org/2009/events/workshops/natalie-jeremijenko/" target="_blank">Fish &#8217;n microChips</a>, with <a href="http://www.environmentalhealthclinic.net/people/natalie-jeremijenko/" target="_blank">Natalie Jeremijenko.</a> We are at the site of the <a href="http://www.sentientcity.net/exhibit/?p=5" target="_blank">Amphibious Architecture</a> project (a commissioned work for <a href="http://www.sentientcity.net/exhibit/?cat=3" target="_blank">Toward the Sentient City</a>) and &#8220;a collaborative project with <a href="http://www.environmentalhealthclinic.net/environmental-health-clinic/" target="_blank">xClinic</a>, The Living and other intelligent creatures.&#8221;</em></p>
<p>We are probably as far off some grand futurist visions of ubiquitous computing as we are some of the futurist visions of augmented reality. But as it turns out that may not be a bad thing! Recently, <a href="http://twitter.com/mikekuniavsky" target="_blank">@mikekuniavsky</a> noted in a tweet:</p>
<p><span><span>&#8220;Another argument for the LAN of Things before the Internet of Things: <a rel="nofollow" href="http://tinyurl.com/lgp9uq" target="_blank">http://tinyurl.com/lgp9uq&#8221;</a></span></span></p>
<p><span><span>Bert Moore, <a href="http://www.aimglobal.org/members/news/templates/template.aspx?articleid=3553&amp;zoneid=24" target="_blank">in the article Mike linked to points out</a>, the grand vision of an &#8220;internet of things&#8221; with everything connected to everything can &#8220;distract people from thinking about the benefits of RFID in smaller, more easily implemented and cost-justified applications.&#8221; The same argument I think applies to sensor networks and augmented reality.</span></span></p>
<p>In New York City, a series of commissioned works for the <a href="http://www.archleague.org/" target="_blank">Architectural League of New York&#8217;s</a> exhibit,<em> </em><a href="http://www.sentientcity.net/exhibit/?cat=3" target="_blank">&#8220;Toward the Sentient City&#8221;</a><em> </em>are giving us the opportunity to dip our toes into the ocean of a &#8220;networked urbanism.&#8221; For only a small budget, two of the <a href="http://www.sentientcity.net/exhibit/?cat=4" target="_blank">five commissioned works</a>, <a href="http://www.sentientcity.net/exhibit/?p=5" target="_blank">Amphibious Architecture</a> and <a href="http://www.sentientcity.net/exhibit/?p=43" target="_blank">Natural Fuse</a> demonstrate how sensor networks can allow us to explore new kinds of communities &#8211; connecting people to environments in interesting ways to create new forms of social agency.</p>
<p><a href="http://www.sentientcity.net/exhibit/?p=5" target="_blank">&#8220;Amphibious Architecture</a>&#8221; &#8211; from The Living Architecture Lab at Columbia University Graduate School of Architecture, Planning and Preservation (Directors David Benjamin and Soo-in Yang) and Natalie Jeremijenko, Environmental Health Clinic at New York University, uses a skillfully built (electronics and water are notoriously hard to mix) array of partially submerged sensors to pierce the blinding, reflective surfaces of the rivers surrounding Manhattan and to create a new two way relationship with the ecosystem below &#8211; the water, our neighbors the fish and even a beaver that lives in the water surrounding Manhattan.</p>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/09/Screen-shot-2009-09-26-at-6.34.56-PM.png"><img class="alignnone size-medium wp-image-4536" title="Screen shot 2009-09-26 at 6.34.56 PM" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/09/Screen-shot-2009-09-26-at-6.34.56-PM-300x125.png" alt="Screen shot 2009-09-26 at 6.34.56 PM" width="300" height="125" /></a></p>
<p><em>Image from <a href="http://www.sentientcity.net/exhibit/?p=5" target="_blank">Toward the Sentient City</a></em></p>
<p>In a similar spirit, &#8220;<a href="http://www.sentientcity.net/exhibit/?p=43" target="_blank">Natural Fuse</a>&#8221; &#8211; Usman Haque, creative director, Nitipak &#8216;Dot&#8217; Samsen, designer, Ai Hasegawa, designer, Cesar Harada, designer, Barbara Jasinowicz, producer, creates a network of people and electronically assisted plants to explore what it takes to work together on energy consumption and to experience the consequences of &#8220;selfish&#8221; and &#8220;unselfish&#8221; behavior interactively before it is too late to modify our actions.</p>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/09/Screen-shot-2009-09-26-at-6.55.29-PM.png"><img class="alignnone size-thumbnail wp-image-4537" title="Screen shot 2009-09-26 at 6.55.29 PM" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/09/Screen-shot-2009-09-26-at-6.55.29-PM-150x150.png" alt="Screen shot 2009-09-26 at 6.55.29 PM" width="150" height="150" /></a><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/09/Screen-shot-2009-09-26-at-9.37.06-PM.png"><img class="alignnone size-thumbnail wp-image-4548" title="Screen shot 2009-09-26 at 9.37.06 PM" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/09/Screen-shot-2009-09-26-at-9.37.06-PM-150x150.png" alt="Screen shot 2009-09-26 at 9.37.06 PM" width="150" height="150" /></a></p>
<p><em>The &#8220;Greedy Switch&#8221; from <a href="http://www.sentientcity.net/exhibit/?p=43" target="_blank">Natural Fuse</a> on the left. On the right &#8220;The System&#8221; &#8211; click to enlarge.</em></p>
<p>Much more to come in another post on these works, and &#8220;Toward the Sentient City.&#8221;Â  Also an update on how <a href="http://www.pachube.com/">Pachube</a> &#8211; an important part of both these projects and a very important contribution to ubiquitous computing because it creates the opportunity to connect environments and create mashups from diverse sensor data feeds &#8211; has matured since my interview with Pachube founder, Usman Haque, <a href="http://www.ugotrade.com/2009/01/28/pachube-patching-the-planet-interview-with-usman-haque/" target="_blank">&#8220;Pachube, Patching the Planet,&#8221;</a> in January this year.</p>
<p>In the picture above <a href="http://www.environmentalhealthclinic.net/people/natalie-jeremijenko/" target="_blank">Natalie Jeremijenko</a>, and <a id="r_oi" title="Jonathan Laventhol, Imagination" href="http://www.laventhol.com/about" target="_blank">Jonathan Laventhol</a> give the <a href="http://www.sentientcity.net/exhibit/?p=5" target="_blank">Amphibious Architecture</a> sensor array a last look over, as it will soon be lowered into the East River. Jonathan is on a busman&#8217;s holiday to help out at the pre launch of Amphibious Architecture, nr Manhattan Bridge, NYC.</p>
<p>I was very happy to getÂ  a chance to talk to <a id="r_oi" title="Jonathan Laventhol, Imagination" href="http://www.laventhol.com/about" target="_blank">Jonathan Laventhol </a>- more on our conversation in another post<em>. </em>Jonathan Laventhol is <a id="r_oi" title="Jonathan Laventhol, Imagination" href="http://www.laventhol.com/about" target="_blank">CTO of Imagination &#8211; one of the world&#8217;s leading design, events, and branding agencies.</a> We talked about the importance ofÂ <a id="r_oi" title="Jonathan Laventhol, Imagination" href="http://www.laventhol.com/about" target="_blank"> Pachube</a>, which Jonathan called the &#8220;The Facebook of Data,&#8221;Â  andÂ  how the <strong>symbiosis between brands and augmented reality</strong>, and healthcare applications, wouldÂ  be key to augmented reality emerging into the mainstream.</p>
<p><em><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/09/dhj5mk2g_340djvd2thc_b.jpg"><img class="alignnone size-medium wp-image-4453" title="dhj5mk2g_340djvd2thc_b" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/09/dhj5mk2g_340djvd2thc_b-235x300.jpg" alt="dhj5mk2g_340djvd2thc_b" width="235" height="300" /></a></em></p>
<p>Natalie Jeremijenko&#8217;s workshop at Conflux on the social negotiation of technology and how <a href="http://speedbird.wordpress.com/my-book-everyware-the-dawning-age-of-ubiquitous-computing/" target="_blank">&#8220;everyware&#8221;</a> can give us the chance to experience new forms of agency and connection was totally inspiring. And I will cover this too in another post. I have so much awesome stuff to write about at the moment!</p>
<p>None of the projects in, &#8220;Toward the Sentient City,&#8221; included a mobile augmented reality, or &#8220;magic lens&#8221; component, but they all pointed to why &#8220;enchanted windows into our newly inside-out reality&#8221; are going to be so important. And why the DNA base pair of ubicomp and augmented reality can really do stuff that matters.</p>
<h3>Shangri- La &#8211; &#8220;Transfigured City&#8221;</h3>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/09/dhj5mk2g_342g43n6w7k_b.png"><img class="alignnone size-medium wp-image-4452" title="dhj5mk2g_342g43n6w7k_b" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/09/dhj5mk2g_342g43n6w7k_b-300x249.png" alt="dhj5mk2g_342g43n6w7k_b" width="300" height="249" /></a></p>
<p>Screenshot from <a href="http://en.wikipedia.org/wiki/Shangri-La_%28novel%29" target="_blank">Shangri-La</a> episode <a id="cwnc" title="The Transfigured City," href="http://www.kazeebo.com/view/17506/shangrila-episode-14-transfigured-city/" target="_blank">Transfigured City</a></p>
<p>In my AR Consortium founder member interview series, I have found that, understandably, the visionary founders of these first augmented reality companies are a little reticent about sharing their full vision. They are basically in stealth mode in this regard. So as you will not, from my interview with <a href="http://www.t-immersion.com/" target="_blank">Total Immersion</a> founder and CEO, Bruno Uzzan, get a fully drawn scenario of his vision for a next generation of shared augmented reality experiences, here&#8217;s a really interesting episode from the anime Shangri La called, <a id="cwnc" title="The Transfigured City," href="http://www.kazeebo.com/view/17506/shangrila-episode-14-transfigured-city/" target="_blank">Transfigured City</a>, to mull over instead.</p>
<p>As you can tell from this rather long and circuitous intro to my conversation with Bruno Uzzan, I have been investigating shared augmented realities pretty intensively recently. And Mike Kuniavsky pointed me to <em><a href="http://en.wikipedia.org/wiki/Shangri-La_%28novel%29" target="_blank">Shangri-La</a></em>, and <a id="cwnc" title="The Transfigured City," href="http://www.kazeebo.com/view/17506/shangrila-episode-14-transfigured-city/" target="_blank">Transfigured City</a>, in a conversation with Mark Shepard, after Mark&#8217;s presentation at Conflux, <a href="http://confluxfestival.org/2009/events/workshops/mark-shepard/" target="_blank">Sentient City Survival Kit.</a></p>
<p><a href="http://thingm.com/about-us/team/mike-kuniavsky.html">Mike Kuniavsky</a> with <a href="http://thingm.com/about-us/team/tod-e-kurt.html">Tod E. Kurt</a> is founder of <a href="http://thingm.com/home.html" target="_blank">ThingM</a>, a ubiquitous computing device studio. Also Mike Kuniavsky researches, designs and writes about people&#8217;s experiences at the intersection of technology and everyday life &#8211; see Mike&#8217;s blog <a href="http://www.orangecone.com/" target="_blank">Orange Cone</a>. And I interviewed Mike at Etech &#8211; see <a href="../../2009/03/18/dematerializing-the-world-shadows-subscriptions-and-things-as-services-talking-with-mike-kuniavsky-at-etech-2009/" target="_blank">here</a>.</p>
<p>In <a id="cwnc" title="The Transfigured City," href="http://www.kazeebo.com/view/17506/shangrila-episode-14-transfigured-city/" target="_blank">Transfigured City</a>, the &#8220;Metal Age&#8221; group has to figure out how to share and communicate in a city transfigured by augmented realities/virtualities, where no-one sees the same place in the same way.Â  Only one character can figure out from her previous experience of the city the relationship between the transfigured city and how it used to be.</p>
<p>The conversation I had with <a href="http://www.orangecone.com/" target="_blank">Mike Kuniavsky</a> on <a id="cwnc" title="The Transfigured City," href="http://www.kazeebo.com/view/17506/shangrila-episode-14-transfigured-city/" target="_blank">The Transfigured City</a> continued at a picnic in Washington Square Park the next day with Elizabeth Goodman, who I met at Etech when she gave a brilliant presentation, <a id="eag1" title="Designing for Urban Green Space" href="http://en.oreilly.com/et2009/public/schedule/detail/5562" target="_blank">Designing for Urban Green Space</a>.Â  We covered so many areas at the picnic related to ubiquitous computing and augmented realities that this conversation probably deserves a post of its own (my writing to do list is growing longer!).</p>
<p><a id="on28" title="The Plot Synopsis for Shangri La" href="http://en.wikipedia.org/wiki/Shangri-La_%28novel%29" target="_blank">The Plot Synopsis for Shangri La</a>:</p>
<p><strong>&#8220;In the mid-21st century, the international committee decided to forcefully reduce CO2 emission levels to mitigate the global warming crisis. As a result, the economic market was transferred mainly into the trade of carbon. A great earthquake destroys much of Japan, yet the carbon tax placed on the country is not lifted, so Tokyo is turned into the worldâ€™s largest &#8220;jungle-polis&#8221; that absorbs carbon dioxide. Project Atlas is commenced to plan the rebuilding of Tokyo and oversee the government organization, which the Metal Age group opposes due to its oppressive nature. However, Atlas is only built with enough room for 3,500,000 people and most people are not allowed to migrate into the city. The disparity between the elite within Atlas and the refugees living in the jungles outside of its walls set up the background of the story.&#8221;</strong></p>
<p><strong></p>
<p></strong></p>
<p><a name="jumpto"><span style="font-size: medium;"><strong> Talking With Bruno Uzzan</strong></span></a></p>
<p><span style="font-size: medium;"><strong><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/09/BrunoUzzanpost.jpg"><img class="alignnone size-medium wp-image-4494" title="BrunoUzzanpost" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/09/BrunoUzzanpost-225x300.jpg" alt="BrunoUzzanpost" width="225" height="300" /></a></p>
<p></strong></span></p>
<p><strong></p>
<p>Tish Shute:</strong> We won&#8217;t have fully opened the Pandora&#8217;s Box of Augmented Realities until we have ubiquitous, shared augmented realities, will we?</p>
<p><span id="p-xo" title="Click to view full content"> <strong>Bruno Uzzan: Yes. The most important for augmented reality is the experience we want to share. Now we are working on the cell phone, we can potentially do some marketing components that we already have developed now on cell phone. Done. Itâ€™s working.</strong></span></p>
<p><strong>But the most interesting part of it is how these new components [cell phone AR] will be used for marketing campaigns by brands. And we are also pretty much well positioned to transform some of the AR that we currently have working on Mac and PC and to transform these to applications working on mobile devices. </strong></p>
<p><strong>Tish Shute: </strong> We havenâ€™t really experienced yet what it means to actually share mobile AR experiences?</p>
<p><strong>Bruno Uzzan: Itâ€™s hard &#8212; we did a Facebook app. Itâ€™s a first try, it has a way to go.Â  But </strong><span id="c8ek" title="Click to view full content"><strong> to go more and more into social, is the way forward for us &#8211; to share and expand AR experiences. But yes, I mean what youâ€™re seeing is how two people on two different applications can share that same expanse.Â  For sure we are going in that direction. We are currently working on those kind of solutions. How people can share and experience together at the same time. Thatâ€™s how we start creating excitement in augmented reality, and itâ€™s coming up.</strong></span></p>
<p><strong>It&#8217;s a new market and thereâ€™s so much more in store for augmented reality. You know, some people are telling me, donâ€™t you believe that augmented reality is a gimmick? It will be a trend for a few weeks or a few months and then gone? I say, youâ€™re kidding me. This is only the beginning. I mean I can assure you that the applications that are on the market today are one percent of what we will have five years from now.</p>
<p></strong></p>
<p><strong>Tish Shute: </strong>I agree.</p>
<p><strong>Bruno Uzzan: And Iâ€™m sure that augmented reality will be a part of a lot of components that we are currently using today &#8211; GPS, web browser, glasses, I mean there are so many applications that will come up shortly. This is only the beginning. Iâ€™m completely convinced that augmented reality will be in three years from now what virtual reality is today, which is a billion dollar market.Â  I know that itâ€™s not just a gimmick of a few weeks or a few months, because so many brands are jumping into it, spending money, exploring solutions.Â  I know that itâ€™s not just short term -what they are willing to do and we are willing to do, but also middle and long term. And thatâ€™s what makes this adventure pretty much unique and what makes creating a cutting edge technology, very, very much exciting for us.</p>
<p></strong></p>
<p><span id="pb9s" title="Click to view full content"><strong>Tish Shute:</strong> First could you explain more to me about your partnership with Int13. I am not sure I understand what is in the arrangement from Total Immersion&#8217;s POV. I mean what happens re your own mobile software development? Haven&#8217;t you only been licensed the Int13 SDK for a limited period of time and have limited access to all it&#8217;s power? </span><span id="p_2y" title="Click to view full content"><a href="http://gamesalfresco.com/2009/09/15/why-int13-got-in-bed-with-total-immersion/" target="_blank">Stephane from Int13 said to Ori on Games Alfresco, here, </a>â€œwe have licensed the SDK4 for two years,â€ and then Ori asks, â€œbut you have basically kept the power to yourselves, right?â€ So if they are the only ones that can enhance it and develop the software, where willÂ  TI be in two years in mobile if you havenâ€™t really had the chance to develop your own software .</span></p>
<p><span id="j5co" title="Click to view full content"></p>
<p><strong>Bruno Uzzan: Actually itâ€™s a real win-win situation. Int13 is a very small company and they have so many requests they can&#8217;t possibly fulfill them all. SoÂ  this is a way for both of us to be, as quickly as possible, the first mobile provider for all the requests we have. Also they give us exclusivity so nobody else can use INT13 SDK for such applications.Â  I think that it is a good partnership, </strong></span></p>
<p><strong>And concerning our own mobile applicationâ€¦ First of all we have currently some mobile applications working. But with Int13 we have a mobile solution that can work on many different devices. Thatâ€™s a fact and thatâ€™s working. And, believe me you will hear from us a lot more about this soon. We are fully independent on our mobile development. The reason we closed the partnership with Int 13 isÂ  to be able to deploy mobile in a broad way.</strong></p>
<p><strong> I mean you know that the difficulty with AR mobile is that each separate device needs some customization. Working on the iPhone is different from working on the Nokia, different from working on the Palm; itâ€™s different from working on the Samsung. Each of them have their own operating system inside and so we were interested in Int13&#8242;s very clever embedded solution that allows our solutions to work across many platforms.</strong></p>
<p><strong>The reason we are working with Int13 is that we are able to work on so many mobile devices, thanks to Int13. And in the mobile AR race that we are currently in, the next two years will be extremely important to usâ€¦</strong></p>
<p><span id="z_5s" title="Click to view full content"><strong>Tish Shute:</strong> OK, that definitely clarifies it a lot. So Int13 has done an embedded solution to allow TI developed AR solutions to work easily across many devices?</span></p>
<p><span id="y.wt" title="Click to view full content"><strong>Bruno Uzzan: YesÂ  they have kind of an embedded solution, a way to address extremely quickly new cell phone&#8230; But, currently on our side, we are in discussions with a mobile companyâ€¦ and that only refers to some very specific mobile devices.Â  And what they have is also a way to embed deeper our technology into mobile, so that we can have quickerâ€¦ applications that work on a large number of cell phones.</strong></span><span id="mufh" title="Click to view full content"> </span></p>
<p><strong>Tish Shute:</strong> So, basically it means you don&#8217;t have to go through some complicated negotiations with each of the cell phone companies, is what you are saying?</p>
<p><strong>Bruno Uzzan: Not only negotiations, but also hard development. You know? Working on the Windows mobile is completely different from working on the Palm OS. You know, that&#8217;s different! Its a big work, to have a mobile application working on many other devices. So, INt13,Â  provides us a way for us to save some time and some development cost too.</strong></p>
<p><strong>Tish Shute:</strong> And Int13 doesn&#8217;t have powerful AR development tools like <a href="http://www.t-immersion.com/en,interactive-kiosk,32.html" target="_blank">D&#8217;fusion</a> right?</p>
<p><strong> Bruno Uzzan: Right! That&#8217;s right. That&#8217;s why we say it&#8217;s a true win-win solution. They can benefit from our work too. And we can benefit from their work, in order to deploy quicker and faster mobile solutions. </strong></p>
<p><strong>Tish Shute:</strong> Now, the second thing isâ€¦ there is a lot of debate and disagreement about how far mobile augmented reality is from delivering something more that the &#8220;post it&#8221; approach that has been much publicized in recent months, via all the AR browser apps.</p>
<p>But from my understanding from the conversation we had earlier this summer (see below), Total Immersion is targeting a much higher level of mobile augmented reality than we&#8217;ve seen to date?</p>
<p><strong>Bruno: Yes the browser apps we have seen are a kind of augmented reality, but not exactly the way we see it. Let me explain you why. With this kind of application it&#8217;s true that you can overlay 3D-information and video. That&#8217;s a fact. So, in a sense, that&#8217;s augmented reality. But the way that they are working on the position of the 3D on that video is that they are using compass and GPS-information.. so it means that this AR solution will work only on some building and some physical objects that are FIXED. In a fixed and known position.</strong></p>
<p><strong>So you want to go to a theater?</strong></p>
<p><strong> </strong><span id="a9qv" title="Click to view full content"><strong>The theater is here, for sure it will not move, so you know the position of the theater, and thatâ€™s a fact that you can superimpose an object on the theater. Thatâ€™s what can be done currently. What we are achieving and what we are doing on mobile is more than that. We want to be able to port our solution with trading cards, with brands, into a smart phone.</strong></span></p>
<p><strong>I&#8217;m assuming that you want a can, a drink can, to be able to trigger an experience. The only way you can do it is to be able to understand what the can is. And the current solutions that are out there can&#8217;t do that, it&#8217;s impossible. </strong></p>
<p><strong>Tish Shute:</strong> Right, yes. Thereâ€™s no near-field object at all in these early browser apps.</p>
<p><strong>Bruno Uzzan: And the solution we have is that we can recognize a can and then &#8212; in a very, very precise way and that activates geo-location, so we can superimpose 3D. I mean in that case, it opens up all the applications that we currently have, so they could work on mobile.</strong></p>
<p><strong>Tish Shute:</strong> So for example, if youâ€™re working with a soft drink company, people can trigger that experience wherever they see that can?</p>
<p><strong>Bruno Uzzan: Correct. </strong></p>
<p><strong>Tish Shute:</strong> Yes. Yes, I assumed that was what youâ€™re doing</p>
<p><strong>Bruno Uzzan: We believe &#8212; and maybe thatâ€™s not the case, but we believe that our marker-less tracking technology is pretty much unique on the mobile devices.</strong></p>
<p><strong>I havenâ€™t seen yet, from anyone, a full augmented reality mobile solution working.</p>
<p></strong></p>
<p><span id="rzqr" title="Click to view full content"><strong>I really see AR being part of the Web 3.0 next generation. I mean the vision I have is that, you know &#8212; today, when you want to have information, you go on a website and then you find your information. AR &#8212; and the future is that I think it will be part of the opposite. You want to have information about a product, you just show it to your computer and the information will automatically pop up. I see here a new way to market some key messages, a new way to get information is that some physical product by themselves could be a way to get information, and you donâ€™t have to search anymore for them, itâ€™s coming out to you.</strong></span></p>
<p><strong>AR is definitely for me, one of these components. Another thing that AR is a solution, another thing that AR itself will create these kind of results in how information is being displayed. But Iâ€™m seeingÂ  here a way that could be part of a new way to have access to information. And thatâ€™s part of the vision I have. Whatever, if it is through mobile phone or web or PC, Mac, whatever, I really believe that now this kind of new generation of receiving information will come shortly and could be a kind of a new &#8212; could be part of the new 3.0 generation of the web. </strong></p>
<p><strong>Tish Shute:</strong> My friend <a id="evae" title="Gene Becker" href="http://www.genebecker.com/" target="_blank">Gene Becker</a> did <a href="http://www.genebecker.com/2009/09/thinking-about-design-strategies-for-magic-lens-ar/" target="_blank">an interesting post recently on some of the current limitations of mobile AR</a> where he pointed out the problem of:</p>
<p><em><strong>&#8220;S</strong><strong>implistic, non-standard data formats</strong> â€“ POIs, the geo-annotated data that many of these apps display, are mostly very simple one-dimensional points of lat/long coordinates, plus a few bytes of metadata. Despite their simplicity there has been no real standardization of POI formats; so far, data providers and AR app developers are only giving lip service to open interoperability. Furthermore, they are not looking ahead to future capabilities that will require more sophisticated data representations. At the same time, there is a large community of GIS, mapping and Geoweb experts who have defined open formats such asÂ <a href="http://georss.org/" target="_blank">GeoRSS</a>,Â <a href="http://geojson.org/" target="_blank">GeoJSON </a>andÂ <a href="http://code.google.com/apis/kml/documentation/" target="_blank">KML</a> that may be suitable for mobile AR use and standardization.&#8221;</p>
<p></em> <span id="gd8y" title="Click to view full content"></p>
<p><strong></p>
<p></strong></span><span id="v68s" title="Click to view full content"><strong> Bruno Uzzan: Thatâ€™s interesting. I mean &#8212; I know exactly what his is referring to. He is mainly referring to a localization and how you can have a quick, accurate localization.Â  If you look at current solutions, and you look at this 3-D superimposing on the video, the 3-D is shaking a lot. I donâ€™t know if you see that in some of these early efforts.</strong></span></p>
<p><strong>Itâ€™s hard to use because the 3-D, you know, isÂ  part of the magic of augmented reality, that is when the 3-D is being inserted in a very easy way and smooth way in your solution. Here, when you see this overlay, 2-D or 3-D overlaid on the video, itâ€™s shaking a lot. One reason for this is that the GPS compass is not accurate enough to coordinate the perfect location of the user. And here, what Gene says is interesting. I think we are addressing this localization issue in a pretty smart way.</strong></p>
<p><strong>But to be frank with you, I donâ€™t believe mobile augmented reality in the extremely short term &#8212; Iâ€™m talking about three weeks, one, two months is mature enough for good AR applications.Â  It will be shortly.Â  But for now it is more proof of concept than a true and easy application to use. </strong></p>
<p><strong>But we are starting to see a lot of new application coming out, but I really believe that marketing and entertainment are the two key markets for AR right now.</strong></p>
<p><strong>Iâ€™ve been working ten years in augmented reality. And, eight years ago, when I was talking about augmented reality, I was E.T., you know? Nobody understood what I said, and I thought it was crazy. And now, today, yes itâ€™s completely different.</strong><strong> </strong></p>
<p><strong> </strong></p>
<p><strong>Tish Shute:</strong> The Pandora&#8217;s Box of Augmented Realities, in my view, is an open, universal and standard, distributed, multiuser, augmented reality framework fully integrated with the internet and world wide web. I have been looking into Google Wave protocols as a basis for this &#8212; would you be interested in this? Do you think it is feasible?</p>
<p><span id="ngwf" title="Click to view full content"> </span><span id="vz68" title="Click to view full content"><strong> </strong></span></p>
<p><span id="vz68" title="Click to view full content"><strong>Bruno Uzzan: I think this is feasible. I think that&#8217;s doable, that&#8217;s just in my opinion. I mean some people might have another kind of opinion but I think that that&#8217;s definitely doable.</strong></span></p>
<p><strong>Tish Shute:</strong> Yes I suppose an open AR Framework involves cooperation and collaboration, it is more about business and politics than technological problems.</p>
<p><strong> Bruno Uzzan: Yes! Actually the Web is politics. Business is politics. </strong></p>
<p><span id="yeg4" title="Click to view full content"><strong>Tish Shute: </strong>I would be interested if anyone in your R&amp;D team would be interested in looking at some of the ideas that are emerging in our little discussion of Google Wave and an Open AR Framework to offer feedback. It is an interesting time now to input on the Wave Federation Protocol docs because nothing is set in stone right now.</span></p>
<p><span id="hzrf" title="Click to view full content"><strong>Bruno Uzzan: Just shoot me an email, I&#8217;ll try to put you in touch with the right person and, and a team member that can input on this.</strong></span></p>
<p><span id="hbcd" title="Click to view full content"><strong>Tish Shute: </strong>For mobile augmented reality the best thing we&#8217;ve got now is the phone, right?</span></p>
<p><strong>Bruno Uzzan: Right. </strong></p>
<p><strong>Tish Shute:</strong> And the only way we can use the phone is by holding it up, right? Isn&#8217;t this a bit of an obstacle as you introduce better object recognition and tracking? People are going to have to stop moving to use their phone. What do you feel about that experience? Isn&#8217;t AR eyewear an essential part of a tightly registered AR experience?</p>
<p><strong></p>
<p>Bruno Uzzan: </strong>We donâ€™t do hardware and we donâ€™t have the current solution for eyewear that would do all we need for a good mobile AR experience, so I guess we donâ€™t have the current answer for that.Â  But we are beginning to see the next generation of this &#8212; of these glasses.</p>
<p><strong>Tish Shute:</strong> But youâ€™re happy enough with the mobile experience of augmented reality on smart phones that youâ€™re investing in this next generation of software for this.</p>
<p><strong>Bruno Uzzan: Yes, I know. We know that some application will not work on the iPhone. And yes, whatever you do, you still need to hold the iPhone, so it means that you canâ€™t play with your hands anymore. So we know that partially, some AR solutionsÂ  we have on other platforms will lose the magical effectivities on just the iPhone.</strong></p>
<p><strong>But Iâ€™m starting to see on the market some glasses that could perhaps be not too expensive &#8212; thatâ€™s a challenge!Â  And easy to use &#8212; thatâ€™s another big challenge. And, that could fit on anybodyâ€™s faces and head &#8212; there&#8217;s another big challenge. So yes, Iâ€™m starting to see that, but so far AR glasses are only applicable for some very, very specific application, like design or theme park or, you know, some specific location where it makes sense to move forward with glasses.</p>
<p></strong></p>
<p><strong>I don&#8217;t believe that kids will use glasses for &#8212; in our toys and for games in the next months or maybe the next one or two years. But maybe something will come out shortly and that could be a big breakthrough, and enable us to think another way. But from what we have seen so far and from what we know in this hardware market, I don&#8217;t believe that currently there is a workable solution.</p>
<p><span style="font-size: small;"></p>
<p></span></strong> <span style="font-size: small;"><strong></p>
<p></strong></span><span style="font-size: medium;"><span style="font-size: small;"><strong>Note: The following section of the interview took place earlier in the Summer.</strong></span></p>
<p></span><span id="yvdi" title="Click to view full content"></p>
<p><strong>Tish Shute:</strong> You are the first commercial AR company &#8211; you started in 1999, right?</p>
<p><strong></p>
<p>Bruno Uzzan: Yes you are right. We started the extremely early in this augmented reality market. We were the first company worldwide to start doing augmented reality and to start promoting augmented reality. So it&#8217;s true, we are pretty old players although the market has been getting bigger and bigger for the last year and a half. So for a long time we were only in the market, and the market was not really there.</strong></span></p>
<p><strong>But for the past 8 months, the company has been growing really fast.</strong></p>
<p><strong>Tish Shute:</strong> Yes I&#8217;m sure. Congratulations for hanging in there long enough to get the pay off!</p>
<p><strong> Bruno Uzzan: You know, my background is Financial. So I have been driving the company for many years in a very cash efficient way. So we have been waiting for the markets to reach maturity before starting make some investments. So that&#8217;s the reason we are still here, and that&#8217;s the reason I think we managed pretty smartly the cash that we raised for the company.</strong></p>
<p><strong>Tish Shute:</strong> Yes there is a saying that when a market takes off you can tell the pioneers because they are the ones with the arrows in their backs. But I am glad you are dodging the arrows!</p>
<p><strong>Bruno Uzzan: You know, I&#8217;ve always driven the company with revenue. And because revenue was not there at the beginning I was extremely cautious about the cash. So now that the company is getting some revenue, for sure we are making more and more investments, and taking advantage of our situation as a worldwide leader of augmented reality.</strong></p>
<p><strong>This situation is not easy as it appears today but it&#8217;s now getting better, as you can see, AR, Augmented Reality, has very good momentum and we are benefiting a lot from all this momentum for augmented reality right now.</strong></p>
<p><strong>Tish Shute:</strong> You&#8217;ve been very involved in researching developing augmented reality tools. Are you still as active in the research area, or are you too busy keeping up with work for hire now, to be working on research and building new technology for Augmented Reality?</p>
<p><strong>Bruno Uzzan: Both. First of all, we are part of a lot of projects either directly with clients like Mattel or with some partners that are using our technology to promote and develop other AR projects. From what we have seen, many, many, many augmented reality projects have been done currently with our solutions.</strong></p>
<p><strong>To continue with your previous question. So we are being perceived as this leader in that space, and weÂ  have some pretty heavy demand for our services. But we are coming up with new technology, of course, still connected to Augmented Reality.Â  But, our R &amp; D is working in two different directions, which of course also bind together.</strong></p>
<p><strong>The first one is platform developments. So we want </strong><strong>Augmented Reality to work with as many platforms as possible &#8211; PC, Mac, Mobile, Game Consoles, all those are the platforms that we are targeting. We are currently doing lot of work in the R &amp; D team in cross platform compatibility</strong><strong>.</p>
<p></strong></p>
<p><strong>Tish Shute:</strong> Robert Rice said recently, &#8220;markers and webcams equal Photoshop page curls&#8230;&#8221;</p>
<p><span id="dulu" title="Click to view full content"></p>
<p><strong>Bruno Uzzan: Yes. There are so many concerns with markers. The quality is extremely bad. As soon as you hide a part of the marker, a slight part of the marker, youâ€™re dead. You canâ€™t track any more of the object. So compared to our solution where I want to say play with cards or where you are going to play with a Mattel toy, even if you hide a part of the toy, itâ€™s still working.</strong></span></p>
<p><strong> Tish Shute:</strong> But you havenâ€™t offered the public an SDK to your engine right? Basically the way people get access to your tools is working in a partnership with Total Immersion right?</p>
<p><strong>Bruno Uzzan: Correct. </strong></p>
<p><strong>Tish Shute:</strong> Do you think in the future you might open your SDK? Are you considering that?</p>
<p><strong></p>
<p>Bruno Uzzan: Yes, it would be interesting. </strong></p>
<p><strong>Tish Shute:</strong> So that is something we can see coming soon?</p>
<p><span id="short_transcription0" title="Click to view full content"><strong>Bruno Uzzan: Maybe, because itâ€™s true that Total Immersion is starting to be mature enough for these kind of tools. The only thing is that we have to respect good timing for that.Â  Itâ€™s a big decision. You know what I mean?Â  It is a big, big decision. We would then compete with others using our technology. </strong></span></p>
<p><strong>Tish Shute:</strong> Oh I know, it is a big decision when you have so much skin in the game! But it would be nice to have your SDK being THE platform for AR, wouldn&#8217;t it?</p>
<p><strong> Bruno Uzzan: It is a really big decision that we canâ€™t just take like that, you know.Â  There are a lot of friends who told me you have to be extremely careful about timing. This timing is pretty much connected to the maturity of the market. For sure, we see the market being more and more mature. But, there are a lot of low hanging fruits we still want to address</strong></p>
<p><strong>To get the best value possible for all the publicity we have and all the clients we have now. </strong></p>
<p><strong>Tish Shute:</strong> Yes, I know. Youâ€™ve been in this game so long. Now, there is an interesting question here though about tools and platforms because you know, A.R., augmented reality has already expandedÂ  beyond its kind of original purist definition. And when I talk to peopleÂ  about augmented reality, there are actually lot of different ideas and priorities of where the tools should go right now. You know, obviously we have these kind of browser-like applications, but these browser like applications are not dealing with recognizing near field objects yet.Â  What are your priorities for tool development and what are your priorities for AR development in the future? What areas are you going to focus on? Oh dear that is a rambling question!</p>
<p><strong>Bruno Uzzan: [laughter]Â  So, one of our first priorities is we need to create our software with one development, one installer, one software that can be spread on different platforms. The same application, the same software can be used either on a PC, Mac, phone or console. So thatâ€™s a lot of work, because that means that our platform has to address many many different devices and thatâ€™s a big priority for us because we received this request from our clients. We want to be able to use one application on many different platforms and devices. So, thatâ€™s the first one.</p>
<p></strong></p>
<p><strong><span id="hk3z" title="Click to view full content">And the second one is to add more and more interactivity between the real and the virtual world. So, we are working on some improvements to add some real components that will interact with virtual, and that also part of our big strategy and direction and these two worlds can more and more be bridged together, linked together so they can interactÂ  one with the other.</span></strong></p>
<p><strong>Our R&amp;D guys are working on the real world interacting more with the virtual world.Â  And I have started seeing some results which are pretty much crazy and this will be ready for next year.</p>
<p><br style="background-color: #ffff00;" /></strong><span id="b1qt" title="Click to view full content"><strong> There are so many different directions for interaction between the real world and virtual world to develop.Â  Iâ€™m sure ten years from now youâ€™re going to have AR applications everywhere.Â  Its not just temporary fashion stuff or a gimmick for few months. I mean we are getting there, its getting stronger and stronger and we are getting a good adoption rate from our consumers. They like it, they test it, they play with it and brands wants more, people want more and its getting bigger and bigger.</p>
<p></strong></p>
<p><strong>Tish Shute:</strong> Yea and I totally agree, it&#8217;s not a gimmick because the interaction between &#8220;virtual&#8221; and &#8220;real&#8221; enhances the magic of both. Another question about your R&amp;D operation. Is your R&amp;D still in France or have you moved totally out to LA?</span></p>
<p><strong>Bruno Uzzan: We are 50 people in France and I started this LA office two years ago and I moved permanently two years to LA. So Iâ€™m now permanently located in the US to take care of the US office, knowing that revenues are really getting bigger and bigger in the US. So it means that we are getting a lot of traction, working with large company and now Iâ€™m currently located in the US.</strong></p>
<p><strong>Tish Shute:</strong> My sister lives in Paris. Could I visit your R&amp;D lab at some point? Iâ€™d love to visit!</p>
<p><span id="bt1e" title="Click to view full content"><strong>Bruno Uzzan: Yeah sure sure sure. I mean if you want to go. You wonâ€™t have access to all the research. But if you want to go out and meet all the team please do.</strong></span></p>
<p><strong>Tish Shute:</strong> Iâ€™d love to.</p>
<p><strong> Bruno Uzzan: No problem. Shoot me an Email you and I will introduce you to Eric Gehl, COO, he is the COO of the French team. And he can definitely take care of that. </strong></p>
<p><strong>Tish Shute:</strong> That would be fun. Thank you!</p>
<p>Recently, AR browser applications have really caught the imagination of the web community, e.g., Layar and Wikitude. Where do you think the most important market for AR is at the moment<span id="k6fx" title="Click to view full content">, entertainment, green tech, business, education?</span></p>
<p><strong>Bruno Uzzan: I think that all that you mention will be important. The first one that did grab my attention is entertainment, particularly digital marketing, because they are always searching for new ways to interact with players or the consumers. But it&#8217;s just the tip of the iceberg, you know, I mean medical applications could be huge using augmented reality. Education and edutainment are definitely using more and more augmented reality components. And I will just be submitting with big companies &#8211; that are considering using augmentation for education. Museums are very important too. Also augmentation as a kind of free sales tool &#8211; you know there are so many applications, design, architecture &#8211; so many directions that it&#8217;s hard to say today which one will take the lead.</strong></p>
<p><strong>But I do believe that on the short term the ones that are really really moving fast are the entertainment business and the digital marketing business. </strong></p>
<p><strong>Tish Shute:</strong> What do you think are the biggest shortcomings with current augmented reality and what are the obstacles that no one has solved yet?</p>
<p><strong>Bruno Uzzan: I think the cell phone is not fully ready for augmented reality &#8211; a lot of people are working on that but there are still a lot of constraints to get augmented reality working on a cell phone, and I think, from what I heard, a lot of manufacturers and a lot of companies are working in directions that are going to help us a lot to develop some great cell phone applications.</strong></p>
<p><strong>And I think that&#8217;s one of the biggest parts of the game. All the applications that you see on cell phones so far are just gimmicks &#8211; the next big key is how to transform some gimmick cell phone application into a real, industrial, robust application that&#8217;s going to work on a cell phone. So I think that&#8217;s a big challenge for this year. </strong></p>
<p><strong>Most of what we see now is just matching and overlaying some 2d components in a video. This is not what I call AR. You&#8217;re far away &#8211; with this kind of application, you are far away from doing the registration that we need to do &#8211; you can&#8217;t do it. So here&#8217;s the challenge: &#8220;how can you get a Topps application working on a cell phone? That&#8217;s the big challenge. How we can make that work!&#8221;</strong> <strong> You can&#8217;t today get a real AR Topps application working on a cell phone because there&#8217;s no cell phone that&#8217;s actually ready. But we are working on it and the first one that can make that work, it&#8217;s going to be huge.</strong></p>
<p><span id="b9-2" title="Click to view full content"><strong>When you are working with good AR components you need a lot of CPU and GPU programs. So today new cell phones have started to be more and more ready for augmented reality but you need a really good cell phone to make it work. You can&#8217;t choose an old cell phone to make it work because you have some recognition, you have some tracking, you have some rendering, so you can&#8217;t choose a Nokia cell phone two years old to make that work. For sure the newest iPhone is the one that can make it work, but that&#8217;s it for now. There is a lot of research &#8211; from large cell phone companies &#8211; to get more CPU and GPU into their cell phones. But so far we are also waiting for these devices to be released to consumers.</strong></span></p>
<p><strong>Tish Shute: </strong>And the current economic climate has put a damper on MIDs hasn&#8217;t it. But who can tell? It depends what price points some new MID came out at right?</p>
<p><strong>Bruno Uzzan: Correct.</strong></p>
<p><strong>Tish Shute:</strong> Yes, I agree. But basically what&#8217;s interesting, the interesting thing is, the iPhone can deliver so much of what is necessary and even if Apple hasn&#8217;t given access to the full power of the iPhone to AR developers yet, there is really no going back now &#8211; the mobile augmented reality cat is out of the bag!</p>
<p><strong>Bruno Uzzan: You&#8217;re right, you&#8217;re fully right. </strong></p>
]]></content:encoded>
			<wfw:commentRss>https://www.ugotrade.com/2009/09/26/total-immersion-and-the-transfigured-city-shared-augmented-realities-the-web-squared-era-and-google-wave/feed/</wfw:commentRss>
		<slash:comments>36</slash:comments>
		</item>
		<item>
		<title>Twitter and The Web of Flow: Talking with Stowe Boyd &amp; Bruce Sterling about Microsyntax, Squelettes, Favela Chic and the State of Now</title>
		<link>https://www.ugotrade.com/2009/06/28/twitter-and-the-web-of-flow-talking-with-stowe-boyd-bruce-sterling-about-microsyntax-squelettes-favela-chic-and-the-state-of-now/</link>
		<comments>https://www.ugotrade.com/2009/06/28/twitter-and-the-web-of-flow-talking-with-stowe-boyd-bruce-sterling-about-microsyntax-squelettes-favela-chic-and-the-state-of-now/#comments</comments>
		<pubDate>Sun, 28 Jun 2009 18:23:28 +0000</pubDate>
		<dc:creator><![CDATA[Tish Shute]]></dc:creator>
				<category><![CDATA[Instrumenting the World]]></category>
		<category><![CDATA[internet of things]]></category>
		<category><![CDATA[new urbanism]]></category>
		<category><![CDATA[ubiquitous computing]]></category>
		<category><![CDATA[websquared]]></category>
		<category><![CDATA[World 2.0]]></category>
		<category><![CDATA[#140conf]]></category>
		<category><![CDATA[Aaron Straup Cope]]></category>
		<category><![CDATA[aesthetics of streaming]]></category>
		<category><![CDATA[asymmetric follow]]></category>
		<category><![CDATA[asynchronous web versus synchronous web]]></category>
		<category><![CDATA[being a character]]></category>
		<category><![CDATA[bottom up informatics]]></category>
		<category><![CDATA[brian solis]]></category>
		<category><![CDATA[brightkite]]></category>
		<category><![CDATA[Bruce Sterling]]></category>
		<category><![CDATA[Bruce Sterling on Twitter]]></category>
		<category><![CDATA[Clay Shirky]]></category>
		<category><![CDATA[CNN and Twitter]]></category>
		<category><![CDATA[cross-links keywords and networks]]></category>
		<category><![CDATA[data shadows]]></category>
		<category><![CDATA[evolution of microsyntax]]></category>
		<category><![CDATA[Favela Chic]]></category>
		<category><![CDATA[favela chic and bottom up informatics]]></category>
		<category><![CDATA[geoslashes]]></category>
		<category><![CDATA[Google and Twitter]]></category>
		<category><![CDATA[Google Wave]]></category>
		<category><![CDATA[googlewave]]></category>
		<category><![CDATA[Gothic High Tech]]></category>
		<category><![CDATA[hash tags]]></category>
		<category><![CDATA[hash tags on Twitter]]></category>
		<category><![CDATA[high rise favelas]]></category>
		<category><![CDATA[hybrid vigor]]></category>
		<category><![CDATA[information shadows]]></category>
		<category><![CDATA[Interactions Magazine]]></category>
		<category><![CDATA[Iran and Twitter]]></category>
		<category><![CDATA[iran election and Twitter]]></category>
		<category><![CDATA[Iranian Twitters]]></category>
		<category><![CDATA[Jack Dorsey]]></category>
		<category><![CDATA[Jeff Pulver]]></category>
		<category><![CDATA[Kevin Slavin]]></category>
		<category><![CDATA[Lars and Jens Rasmussen]]></category>
		<category><![CDATA[LIFT]]></category>
		<category><![CDATA[Lift Conference 2009]]></category>
		<category><![CDATA[magic words]]></category>
		<category><![CDATA[Mark Vanderbeeken]]></category>
		<category><![CDATA[Michael Jackson and Twitter]]></category>
		<category><![CDATA[Microsyntax]]></category>
		<category><![CDATA[Microsyntax and Twitter]]></category>
		<category><![CDATA[Microsyntax.org]]></category>
		<category><![CDATA[New Depression]]></category>
		<category><![CDATA[Pachube]]></category>
		<category><![CDATA[pachube google wave and microsyntax]]></category>
		<category><![CDATA[Prada Goth]]></category>
		<category><![CDATA[real time search]]></category>
		<category><![CDATA[reboot11]]></category>
		<category><![CDATA[semantic web]]></category>
		<category><![CDATA[semweb]]></category>
		<category><![CDATA[sensor networks]]></category>
		<category><![CDATA[SMS messages in Iran]]></category>
		<category><![CDATA[social web]]></category>
		<category><![CDATA[Squelettes]]></category>
		<category><![CDATA[Stowe Boyd]]></category>
		<category><![CDATA[streamy aesthetics of sensors]]></category>
		<category><![CDATA[stuffed animals]]></category>
		<category><![CDATA[stuffed animals and failed states]]></category>
		<category><![CDATA[stuffed animals and regulatory capture]]></category>
		<category><![CDATA[The 140 Characters Conference]]></category>
		<category><![CDATA[the internet of things]]></category>
		<category><![CDATA[The Now Web]]></category>
		<category><![CDATA[The State of Now]]></category>
		<category><![CDATA[The Web of Flow]]></category>
		<category><![CDATA[Things That Twitter]]></category>
		<category><![CDATA[Tim O'Reilly on Google Wave]]></category>
		<category><![CDATA[Tish Shute]]></category>
		<category><![CDATA[Tweet Deck]]></category>
		<category><![CDATA[twitter]]></category>
		<category><![CDATA[ubicomp]]></category>
		<category><![CDATA[webthropology]]></category>
		<category><![CDATA[Wyclef Sean and Twitter]]></category>

		<guid isPermaLink="false">http://www.ugotrade.com/?p=3835</guid>
		<description><![CDATA[I met Stowe Boyd, of Microsyntax.org at Jeff Pulver&#8217;s 140 Characters Conference which convened in the middle of a perfect storm for the State of NOW (more mundanely known as the real time web) as thousands of tiny Twitter pipes became a vital conduit for the historic events occurring in Iran (picture on left, Stowe [&#8230;]]]></description>
				<content:encoded><![CDATA[<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/06/stoweboyd2.jpg"><img class="alignnone size-medium wp-image-3851" title="stoweboyd2" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/06/stoweboyd2-296x300.jpg" alt="stoweboyd2" width="296" height="300" /></a><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/06/BruceSterlingAtReboot.jpg"><img class="alignnone size-medium wp-image-3971" title="BruceSterlingAtReboot" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/06/BruceSterlingAtReboot-297x300.jpg" alt="BruceSterlingAtReboot" width="297" height="300" /></a></p>
<p>I met <a href="http://www.stoweboyd.com/" target="_blank">Stowe Boyd,</a> of <a href="http://www.microsyntax.org/" target="_blank">Microsyntax.org</a> at Jeff Pulver&#8217;s <a href="http://www.140conf.com/" target="_blank">140 Characters Conference</a> which convened in the middle of a perfect storm for <a href="http://pulverblog.pulver.com/archives/008934.html" target="_blank">the State of NOW</a> (more mundanely known as the real time web) as thousands of tiny Twitter pipes became a vital conduit for the historic events occurring in Iran (picture on left, Stowe Boyd, from <a href="http://www.briansolis.com/" target="_blank">Brian Solis</a>&#8216; Flickr <a href="http://www.flickr.com/photos/briansolis/3569544825/" target="_blank">here</a>, and on the right, Bruce Sterling, presenting at <a href="http://www.reboot.dk/" target="_blank">reboot11</a> from <a title="Link to scriptingnews' photostream" rel="dc:creator cc:attributionURL" href="http://www.flickr.com/photos/scriptingnews/">scriptingnews</a>&#8216; Flickr <a href="http://www.flickr.com/photos/scriptingnews/3662894176/" target="_blank">here)</a>.</p>
<p>But, <a href="http://blog.ted.com/2009/06/qa_with_clay_sh.php" target="_blank">as Clay Shirky pointed out,</a> re Twitter and Iran:</p>
<p><strong>&#8220;It&#8217;s incredibly messy, and the definitive rules of the game have yet to be written. So yes, we&#8217;re seeing the medium invent itself in real time.&#8221;</strong></p>
<p>Stowe Boyd is managing director of <a href="http://www.microsyntax.org/">Microsyntax.org</a>, a non-profit investigating the embedding of structured information within microstreaming applications, particularly Twitter. It is a communitarian project so if you are interested you should get involved &#8211; see Stowe&#8217;s #140conf. presentation, <a href="http://blip.tv/file/2267166" target="_blank">&#8220;The evolution of Microsyntax.&#8221;</a> Stowe is an architect of &#8220;flow&#8221; and a webthropologist of the State of NOW. I had the opportunity to talk with him at the conference (<a href="#StoweInterview">see the full conversation below</a>). We talked not only about some of the practicalities of implementing microsyntax but about how &#8220;the web of flow&#8221; produces a fundamental shift in how we communicate, and who we are. As Stowe Boyd put it:</p>
<p><strong> &#8220;You use these tools, and you are changed. And it&#8217;s just a question of how long you use them and the longer you use them, the more you use them, the more changed you are. When people shift to a basis of sociality around connection with other people as opposed to mass affiliation, it&#8217;s different. It&#8217;s completely different. Your whole system of ethics, the way you judge the world and decide what&#8217;s important is different. And not only different, it&#8217;s better. It&#8217;s a better way to deal with the world.&#8221;</strong></p>
<p>As Wyclef Sean (@<a href="http://twitter.com/wyclef" target="_blank">wyclef</a>) remarked at #140conf, <strong>&#8220;Twitter just cuts the middle man in everything.&#8221;</strong></p>
<p>At the 140 Characters Conference it was hard not to be captivated by the energy and optimism arising from the successful use of Twitter by Iranians to communicate in the aftermath of the election. But the subsequent repression in Iran, in which the regime took advantage of central infrastructure controls to silence Iranian twittering (we have similar network technologies in place here in the US), leaves a big question that came to the fore after the conference:</p>
<p>While these real time applications give us the ability to leverage network effects in totally new ways, and they have enormous potential to make our lives better, do we need to give more thought to the infrastructure they rely on?</p>
<p><a href="http://pulverblog.pulver.com/archives/008957.html" target="_blank">The videos for the 140Conf</a> are up now. If you haven&#8217;t already seen them, after watching Jeff Pulver&#8217;s intro to <a href="http://pulverblog.pulver.com/archives/008950.html" target="_blank">The State of NOW</a> a great place to start is the <a href="http://blip.tv/file/2260001" target="_blank">&#8220;Twitter as a News Gathering Tool&#8221;</a> (Part 2). Also see <a href="http://www.observer.com/2009/media/cnns-rick-sanchez-todays-ann-curry-stand-their-twitter-iran-coverage" target="_blank">Ann Curry Defends Foreign Correspondents, Twitter; Rick Sanchez Defends CNN</a> and Brian Solis&#8217; <a href="http://www.techcrunch.com/2009/06/17/is-twitter-the-cnn-of-the-new-media-generation/">post on techcrunch</a>. Christopher R. Weingarten (<a href="http://twitter.com/1000timesyes" target="_blank">@1000TimesYes</a>), <a href="http://pulverblog.pulver.com/archives/008954.html" target="_blank">&#8220;Twitter and the End Of Music Criticism,&#8221;</a> and <a href="http://www.moeed.com/" target="_blank">Moeed Ahmad&#8217;s</a> (<a href="http://twitter.com/moeed" target="_blank">@moeed</a>), <a href="http://www.moeed.com/blog/2009/05/20/gaza-focus-media-140-conference-london" target="_blank">Gaza in Focus</a>, are two of several must see presentations. The #140Conf was an extraordinary event. Jeff Pulver orchestrated a brilliant cast of characters and a manifestation of social media &#8220;hybrid vigor&#8221; that was exhilarating to be part of.<span><span> </span></span></p>
<p>A â€œDirectorâ€™s Cutâ€ of <span><span>#140conf will be re-broadcast (Monday, June 29th and Tuesday, June 30th) at 11AM EST / 8AM PST &#8211; <a rel="nofollow" href="http://140conf.com/watchit" target="_blank">http://140conf.com/watchit</a>. </span></span>Some of the speakers will be tweeting while their session is being re-broadcast (<a href="http://pulverblog.pulver.com/archives/008960.html" target="_blank">see The Jeff Pulver Blog for more</a>).</p>
<p><strong><strong> </strong></strong><strong> </strong></p>
<p><span><span><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/06/3635038955_2998f2a9e1_b.jpg"><img class="alignnone size-medium wp-image-3886" title="3635038955_2998f2a9e1_b" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/06/3635038955_2998f2a9e1_b-300x200.jpg" alt="3635038955_2998f2a9e1_b" width="300" height="200" /></a></span></span></p>
<p>(picture above from <a href="http://www.briansolis.com/" target="_blank">Brian Solis&#8217;</a> Flickr<a href="http://www.flickr.com/photos/briansolis/3635038955/sizes/l/in/set-72157619870975030/" target="_blank"> here</a>)</p>
<p>In a serendipitous convergence of events I found myself in the front row taking photos <a href="http://www.flickr.com/photos/briansolis/sets/72157619870975030/" target="_blank">for Brian Solis</a> (@briansolis) see Brian&#8217;s post, <a href="http://www.briansolis.com/2009/06/is-twitter-the-cnn-of-the-new-media-generation/" target="_blank">&#8220;Is Twitter the CNN of the New Media Generation.&#8221;</a> I like <a href="http://www.flickr.com/photos/briansolis/3635866464/in/set-72157619870975030/" target="_blank">my photo of Jack Dorsey</a> (@jack) Twitter founder &#8211; the lens of my own camera would never have allowed for this one!</p>
<p>I was also sitting close to Stowe Boyd (@stoweboyd), who out of all of attendees at this jam packed event was one of the people I had most hoped to connect with.</p>
<p><span style="font-size: medium;"><strong>Talking with Bruce Sterling</strong></span><span style="font-size: medium;"><strong> about Squelettes, Twitter, Favela Chic, and Gothic High Tech<br />
</strong></span></p>
<p>I have been following the <a href="http://microsyntax.org/" target="_blank">microsyntax.org</a> effort that Stowe has been leading since <a href="http://www.wired.com/beyond_the_beyond/2009/05/spime-watch-pachube-feeds/" target="_blank">this post by Bruce Sterling  (@bruces) on Pachube Feeds</a> which contained this challenge:</p>
<p><strong>&#8220;(((Extra credit for eager ubicomp hackers: combine this [<a href="http://www.pachube.com/" target="_blank">pachube</a> feeds] with Googlewave, then describe it in microsyntax. Hello, 2015!)))&#8221;</strong></p>
<p>Stowe pointed out in our conversation at #140conf, that Microsyntax.org is in one sense a very narrow project but on the other hand it&#8217;s very broad, because every sort of information that you can imagine is going to be streaming through Twitter and related [real time] applications.</p>
<p>Or as <a href="http://www.aaronland.net/" target="_blank">Aaron Straup Cope</a> put it to me: <strong>&#8220;This is ultimately the &#8220;magic word&#8221; problem, which is essentially the semweb vs. google-is-smarter-than-you problem.&#8221;</strong></p>
<p>There are a bunch of crystal ball posts up at the moment looking into the future of the real time web&#8230; for example, <a href="http://threeminds.organic.com/2009/06/docs_are_old-school_we_need_pa.html?utm_source=twitter&amp;utm_medium=threeminds&amp;utm_campaign=praise" target="_blank">this post on threeminds.organic</a> (via @timoreilly and @<a href="http://twitter.com/buckybit" target="_blank">buckybit</a>) asking whether we need page rank for people and not just sites&#8230; and <a href="http://www.readwriteweb.com/archives/as_the_sun_sets_on_myspace_-_what_will_beat_facebo.php#more" target="_blank">this post on readwriteweb</a> that asks is the state of now the harbinger of doom to walled gardens like Facebook. And there seems to be an arms race starting around real time search.</p>
<p>But Bruce Sterling (<a href="http://twitter.com/bruces" target="_blank">@bruces</a>) in <a href="http://interactions.acm.org/content/?p=1244" target="_blank">his cover story</a> for <a href="http://interactions.acm.org/" target="_blank">Interactions Magazine</a> examines some of the blinkering on <strong style="font-weight: normal;">&#8220;t</strong>wo inherently forward looking schools of thought and action [design and science fiction].&#8221; He writes:</p>
<p><strong>&#8220;We have entered an unimagined culture. In this world of search engines and cross-links, of keywords and networks, the solid smokestacks of yesterday&#8217;s disciplines have blown out.&#8221;</strong></p>
<p>While I was writing up this post, I found myself up at the crack of doom (4 am EST) with insomnia I attribute to a tweet from <a href="http://www.experientia.com/en/who-we-are/mark-vanderbeeken/" target="_blank">Mark Vanderbeeken</a> <a href="http://twitter.com/Vanderbeeken" target="_blank">@vanderbeeken</a> which I (<a href="http://twitter.com/tishshute">@tishshute</a> ) retweeted:</p>
<p><strong>&#8220;Internet of Things &#8211; An action plan for Europe,&#8221; (This EU Doc. cites @<a href="http://twitter.com/agpublic" target="_blank">agpublic</a>&#8217;s Everyware) <a rel="nofollow" href="http://bit.ly/16uiu3" target="_blank">http://bit.ly/16uiu3</a> via @<a href="http://twitter.com/vanderbeeken" target="_blank">vanderbeeken</a>&#8221;</strong></p>
<p>(I wish I had used the new microsyntax in Tweetdeck RE (for more on RE <a href="http://www.stoweboyd.com/message/2009/06/a-useful-bit-of-microsyntax-re.html" target="_blank">see Stowe Boyd&#8217;s post here</a>) then I would have been able to find @vanderbeeken&#8217;s original tweet just now.)</p>
<p>So after a quick scan of the EU paper on the internet of things, and in a &#8220;here comes everybody&#8221; pre-dawn state of mind, craving oracular pronouncement, I impulsively shot an email to Bruce Sterling.</p>
<p>[<strong>Note:</strong> the following is an asynchronous exchange &#8211; not synchronous as a <a href="http://wave.google.com/">Google Wave</a> would have made possible. Also I have pulled the conversation out of the original email format. Lars and Jens Rasmussen of <a href="http://wave.google.com/">Google Wave</a> seem to have hit the nail on the head when they &#8220;set out to answer the question: What would email look like if we set out to invent it today?&#8221; (see <a href="http://radar.oreilly.com/2009/05/google-wave-what-might-email-l.html" target="_blank">this excellent post by Tim O&#8217;Reilly on Google Wave</a>)]</p>
<p><strong>Tish Shute: </strong>I shouldn&#8217;t be up at 4am EST sending you more questions but I began reading The &#8220;Internet of Things &#8211; An action plan for Europe,&#8221; <a href="http://bit.ly/16uiu3" target="_blank">http://bit.ly/16uiu3</a> before I went to sleep and woke up thinking: &#8220;How can we work on an action plan for everybody?&#8221; ((Another highlight of 140Conf. was <a href="http://www.areacodeinc.com/" target="_blank">Kevin Slavin&#8217;s talk on &#8220;Things that Twitter&#8221;</a> &#8211; &#8220;sensor aesthetics are streamy&#8221;)).</p>
<p><strong>Bruce Sterling: *Everybody? What, all <span style="font-family: arial;"><span style="font-size: small;">6,706,993,152 of us?</span></span></strong></p>
<p><strong>Tish Shute:</strong> How does &#8220;it&#8217;s all about the data&#8221; and &#8220;google&#8217;s smarter than you&#8221; thinking versus &#8220;bottom up&#8221;/&#8221;personal informatics&#8221;/&#8221;sem web&#8221; get worked out in the internet of things?</p>
<p><strong>Bruce Sterling:</strong> *<strong>I&#8217;d be guessing via mergers, acquisitions, lawsuits and police crackdowns, but you never know. You might have a massive financial collapse where innovations like this start coming out of slums and favelas. I heard such a great term at LIFT last week: &#8220;Favela Chic.&#8221; That&#8217;s when you are totally penniless and without commercial prospects of any kind but still wired to the gills and big on Facebook.</strong></p>
<p><strong><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/06/3653530586_eb90ef0241_o.jpg"><img class="alignnone size-medium wp-image-3852" title="3653530586_eb90ef0241_o" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/06/3653530586_eb90ef0241_o-300x207.jpg" alt="3653530586_eb90ef0241_o" width="300" height="207" /></a><br />
</strong></p>
<p>Photo of Bruce Sterling at Lift 2009 by <a href="http://www.flickr.com/photos/centralasian/" target="_blank">Centralasian</a></p>
<p><strong>Tish Shute:</strong> Could you elaborate on your comment:</p>
<blockquote><p><em><strong>&#8220;Also, this stuff theyâ€™re discussing: this is like all kindsa trouble ten years from now.&#8221; (from your postÂ <a href="http://www.wired.com/beyond_the_beyond/2009/03/spime-watch-data-shadows/" target="_blank">http://www.wired.com/beyond_the_beyond/2009/03/spime-watch-data-shadows/</a>)</strong></em></p></blockquote>
<p><strong>Bruce Sterling:</strong> <strong>*Okay: you know how much trouble SMS messages are in Iran right now, even though ten years ago, cellphones were only for foreigners and rich guys in Iran? Kinda like that.</strong></p>
<p><strong>Tish Shute</strong>: <a href="http://www.wired.com/beyond_the_beyond/2009/06/ruins-of-the-present/" target="_blank">You wrote here</a>:<em> &#8220;<strong>The idea of living in *abandoned prototypes* or giant failed larval husks is very contemporary, very New Depression. Very &#8220;Favela Chic&#8230;&#8221;</strong></em></p>
<p><a href="../wp-content/uploads/2009/06/squelette-300x221.jpg" target="_blank"></a><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/06/squelette-300x2211.jpg"><img class="alignnone size-full wp-image-3855" title="squelette-300x221" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/06/squelette-300x2211.jpg" alt="squelette-300x221" width="300" height="221" /></a></p>
<p>And:</p>
<p><em><strong>&#8220;Occasionally squatters move into &#8220;squelettes&#8221; and bring in some breeze-block, corrugated tin and plastic hoses, transforming squelettes into high-rise favelas. This doesn&#8217;t work very well because it&#8217;s tough to manage the utilities, especially the water.&#8221;</strong></em></p>
<p><strong>Tish Shute:</strong> So what happens when we rely on Google &amp; Twitter repurposed as our main means to access our government? Not only repressive regimes can cut these utilities off, even though Twitter was asked to delay maintenance so that the Iranian Twitters could keep flowing, Michael Jackson brought Twitter down.</p>
<p><strong>Bruce Sterling: *Google and Twitter aren&#8217;t going to last long enough to become main means of an access to government. It&#8217;s not that Google and Twitter go away and we return to a previous status quo, however. It&#8217;s that they are ramshackle digital expedients that get replaced by even more ramshackle digital expedients.</strong></p>
<p><strong>In the meantime the stuff we used to call &#8220;government&#8221; gets similarly destabilized. It&#8217;s been privatized, or offshored, or turned into a hollow shell.</strong></p>
<p><strong>Tish Shute:</strong> So is Twitter a squelette (like all our favorite internet platforms, including Google Wave which we haven&#8217;t even had a chance to squat yet)? And is microsyntax our breeze-block, plastic hose and corrugated tin &#8211; very Favela chic but vulnerable to the vagaries of Michael Jackson&#8217;s life and death, and deadly shut downs and snooping by repressive regimes that control the underlying utilities? (Squelettes, as Bruce Sterling points out, are: <strong><em>&#8220;one of those coinages like &#8220;Prada Goth&#8221; that spring out everywhere once they are pointed out.&#8221;</em></strong><em>)</em></p>
<p><strong>Bruce Sterling: *We can draw a distinction here: &#8220;Gothic High Tech&#8221; is the top-end version, while &#8220;Favela Chic&#8221; is the low-end. &#8220;Gothic High Tech&#8221; would be the likes of a &#8220;repressive regime&#8221; which finds itself forced to conduct cruel, secret, spooky, Guantanamo cyberwars&#8230; it&#8217;s pretending to transparency, accountability and open elections, while below that surface is a weird, torchlit, Gothic hall of mirrors where invisible hands wreck banks, impoverish the civil population and kidnap people.</strong></p>
<p><strong>It&#8217;s &#8220;Gothic&#8221; because of its magnificent, elaborate appearance &#8212; very &#8220;Castle of Dracula&#8221; &#8212; but that no longer maps onto its panicky, extremist, transgressive behavior.</strong></p>
<p><strong>Gothic High Tech doesn&#8217;t live in &#8220;squelettes.&#8221; Gothic High Tech lives in fancier, more respectable structures called &#8220;stuffed animals.&#8221; A stuffed-animal used to be a functional building. From the outside it looks pretty much like it always did, maybe even &#8220;conservative.&#8221; Inside it&#8217;s half-retrofitted with aging, Frankenstein machineries, already outmoded, rapidly decaying.</strong></p>
<p><strong>A &#8220;stuffed animal&#8221; might, for instance, be a &#8220;savings and loan&#8221; where the behavior of the present-day inhabitants involves no actual saving and no actual loaning. Instead the inhabitants are on television negotiating a position in a crisis narrative and living on bailouts, while, every day, the cobwebs get a little thicker. &#8220;Regulatory capture&#8221; is stuffed-animal activity. &#8220;Failed states&#8221; and &#8220;hollow states&#8221; are stuffed animals.</strong></p>
<p><strong>&#8220;Favela Chic&#8221; is the same basic activity, but with much less money and institutional clout. In &#8220;Favela Chic&#8221; nobody bothers to ask for bailouts. They know the state has failed, or they themselves are engaged in weird activities they prefer to hide from the authorities. &#8220;Favela Chic&#8221; lives within openly failed structures, or else in half-structures that are in &#8220;permanent beta&#8221; and falling down as rapidly as they can be erected. Favela Chic is bottom-up, open-sourced, heavily networked, subversive and piratical.</strong></p>
<p><strong>There&#8217;s a certain amount of class-transition between Gothic High Tech and Favela Chic &#8212; like, Twitter was Favela Chic and is heading straight for Gothic High Tech. But there&#8217;s much less transition than there used to be, because of income differentiation &#8212; the tiny faction of Gothic moguls &#8220;own&#8221; what&#8217;s left of most of the wealth, which they themselves are rapidly destroying. The general trend is not toward increasing global prosperity. The precarity is becoming general. The Favela beckons for everybody. That&#8217;s where most of the planet&#8217;s population lives already, and it&#8217;s certainly where most of the young people live. The idea of a &#8220;developing world&#8221; needs to be reversed; the end game is in the &#8220;developing world&#8221; and the rich nations are heading there.</strong></p>
<p><strong>Tish Shute:</strong> It seems to me that Twitter and the real time web of flow is a revolution in our means of communication presenting awesome opportunities. But, are we squatters in an infrastructure that is hard to manage?</p>
<p><strong>Bruce Sterling: *Yes. I&#8217;d go farther and say that we are squatters in an infrastructure that methodically destroys previous systems of management. Especially itself: the closer you are to a revolutionary real-time web flow, the faster you have to reboot.</strong></p>
<p><strong>Tish Shute:</strong> And what is the answer to the question at the end of <a href="http://interactions.acm.org/content/?p=1244" target="_blank">your cover story for Interactions</a>:</p>
<p><strong><em>&#8220;The winds of the Net are full of straws. Who will make the bricks?&#8221;</em></strong></p>
<p><strong>Bruce Sterling: *I frankly have no idea. The storm-gusts are rising in a hurry and we are in for a whole lot of straws.</strong></p>
<p><strong>*I would point out that, if we could make up out minds about what kind of bricks we wanted, we could make them at tremendous speed. Â We&#8217;re not helpless: our productive capacity is frankly fantastic. Â Clearly we&#8217;ve lost the thread and can no longer explain what we&#8217;ve done to ourselves or how we get out of our fix. Â But we might surprise ourselves. Â 21st century Favela Chic is no mere favela, and Gothic High Tech isn&#8217;t just Gothic, it&#8217;s also very high tech. Â We&#8217;re in a Depression and it&#8217;s gonna last, but this is no 1930s Depression.</strong></p>
<p><strong><br />
</strong></p>
<h3><strong><a name="StoweInterview">Talking with Stowe Boyd</a></strong></h3>
<p><strong><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/06/3629162035_a9332a67e1_o.jpg"><img class="alignnone size-medium wp-image-3862" title="3629162035_a9332a67e1_o" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/06/3629162035_a9332a67e1_o-300x247.jpg" alt="3629162035_a9332a67e1_o" width="300" height="247" /></a><br />
</strong></p>
<p>Photo <a href="http://www.flickr.com/photos/stoweboyd/3629162035/" target="_blank">from Stowe Boyd&#8217;s Flickr stream,</a> &#8220;Little&#8221; Tower Of Babel, Pieter Bruegel the Younger. It is also a slide from his presentation, <a href="http://blip.tv/file/2267166" target="_blank">&#8220;The evolution of Microsyntax.&#8221;</a></p>
<p><strong>[Note:</strong> Most of this conversation took place in a busy foyer at #140conf and various people joined in the conversation at different points. I have cut out these other conversations and tried to maintain the thread of my own questions in the transcription. But this may have resulted in a sense of choppiness and discontinuity in places.]</p>
<p><strong><strong>Tish Shute:</strong></strong> You have been on the front-line of so much web innovation, but, perhaps, you could give me a little back story on how you came to take the lead with microsyntax.org.</p>
<p><strong><strong>Stowe Boyd: </strong>Well, I&#8217;ve been on twitter 990 days or something. But long before Twitter became a commonplace household word, I&#8217;ve been advocating what I&#8217;ve been calling flow application, based on the streaming metaphor &#8211; the notion that you&#8217;d have a stream of updates coming from people that you chose to follow, which is now being called the asymmetric follow model. Years and years ago I postulated that that model was going to come along and completely change all future significant social applications. Back in the late nineties, I introduced a term &#8220;Social tools&#8221; and said social tools were going to come along and change the way the web worked. So I have a history of being 4 or 5 years ahead of what actually happens.</strong></p>
<p><strong>Microsyntax is sort of an interesting outgrowth of that. In a way it&#8217;s a very narrow area, in the sense that it&#8217;s focusing on these information patterns, the way that people want to encode information in the twitter stream or in the realtime stream of other apps. So it&#8217;s very narrow in the sense that it doesn&#8217;t immediately include all sorts of other things like these sports figures talking about how to market their services or whatever. But on the other hand it&#8217;s very broad, because every sort of information that you can imagine is going to be streaming through twitter and related applications.</strong></p>
<p><strong>We saw examples today of plants demanding water or DJ&#8217;s posting their set lists as they&#8217;re playing them, devices or equipment talking about its status, video stream from surveillance cameras. Everything you can possibly imagine will find its way in that stream. It&#8217;s all going to be encoded in different ways and grappling with that is actually an interesting problem. But more importantly it&#8217;s better for us as a community of users if we try to approach it in some systematic fashion. That&#8217;s the purpose of Microsyntax.org &#8211; this nonprofit. The concept of microsyntax is immediately evident to people who use Twitter, and that is we have a whole bunch of conventions that have emerged, and we have some places where it would be nice if conventions did emerge, but we don&#8217;t have them yet. And the idea of creating a nonprofit to do it is a sensible thing to do. So I decided I&#8217;ll go along with the request that others have made, because other people asked me to do this. So that&#8217;s a little unusual for me.</strong></p>
<p><strong>The Web of Flow<br />
</strong></p>
<p><strong>Tish: </strong>What first attracted my attention to Microsyntax.org was Bruce Sterling&#8217;s post<strong> </strong><strong>suggesting combining pachube feeds with Googlewave and then describing this in microsyntax</strong>. Why do you think Bruce Sterling posed this particular challenge?</p>
<p><strong>Stowe: Well, because he sees that everything is moving into the web of flow. Everything is moving out of the web of pages. In the next ten years we&#8217;re going to cease to experience the web as we do now, which is as a bunch of pages and we move around from link to link. And that&#8217;s what browsers are about. They help us move from page to page on the web. But Twitter, and before it the minifeed and instant messaging and a handful of other really interesting applications, have suggested a completely different web where information flows from other people to you through streaming mechanisms.</strong></p>
<p><strong> And the really interesting stuff that comes to me now on a daily basis is streaming to me through Twitter, not through my RSS reader, not me wandering around figuring out what to google, news or something. And that&#8217;s an indicator of the fact that that&#8217;s the hottest, coolest way to do it now, and means that in the future it will be &#8220;the way&#8221; that it&#8217;s done. So there will still be a web of pages out there, but it&#8217;ll exist like an archive. And we won&#8217;t experience the web that way in general because, &#8220;why would I go to the web page and see the guy&#8217;s blog post on his page, when it&#8217;s been served up to me 16 other ways?&#8221; And most importantly I&#8217;ve found it initially in some client, because somebody recommended it to me, and I resolved it in a hover window in my Twitter client. I&#8217;d never go to the page. I comment on it here&#8230;</strong><strong><br />
</strong></p>
<p><strong>Tish:</strong> I like your framing, &#8220;the web of flow&#8230;&#8221;</p>
<p><strong>Stowe: Well it&#8217;s also that one of the characteristics is the tempo is different. I actually wrote a post about this, that I think it&#8217;s fundamentally important. It&#8217;s not really gotten much drift yet. I think it&#8217;s too hard for people to think this way. They just can&#8217;t get it. </strong></p>
<p><strong>The dimension that&#8217;s really most interesting is the transition from secret to private to public. The fact that Twitter is inherently public as a default is a breakthrough. I mean there&#8217;s nothing else like this. The first time that the idea, except for the blogosphere itself which is the concept it&#8217;s built on,Â  the inherent notion is that you&#8217;re publishing stuff and anyone can get access to it. But the tempo thing really matters, the fact that it&#8217;s near synchronous so your perception of what you feel like you&#8217;re doing is you feel like you&#8217;re in a stream of updates from friends. We know that. But the sensation is dramatically different than your close personal relationship with your inbox, which is email. Email is secret, closed, and the sense is the context is that it&#8217;s an inbox, like the one on your desk. And you are boxed in by that, and you&#8217;re not actually feeling like you&#8217;re dealing with people. You feel like you&#8217;re dealing with the inbox.</strong></p>
<p><strong>Tish:</strong> This was only present in boxes as you say &#8211; chat rooms, IM, IRC, MUDs, Virtual Worlds but they all had that realtime experience going on.</p>
<p><strong>Stowe: Yes instant messaging, chat rooms, etc. They were private. You had to invite people. The update paradigm on instant messaging was backwards. It said I want to follow this guy&#8217;s updates, but you had to get his permission to do it. That seemed like a sensible thing in the mid &#8217;90s when people worried about privacy and so they made it private. And private is not good, actually.</strong></p>
<p><strong>Tish: </strong>IRC is exactly like twitter but it&#8217;s off in closed worlds&#8230;</p>
<p><strong>Stowe: Yes you have to know about them. You can&#8217;t just stumble across them, you have to be invited or give the password. It&#8217;s another closed model. But instant messaging is the father of all this, or the mother, depending on which way you look at it. But that fundamental last thing, it&#8217;s based on a quote by Gabriel Garc&#237;a M&#225;rquez</strong> <strong>which is, &#8220;All people have three lives. They have a public life, a private life and a secret life.&#8221; And we are philosophically moving from a time where things were primarily secret (pre internet) to a time where things were primarily private which is web 1.0 into this new web where things are going to be primarily public and open and immediate. So we are building the scaffolding real fast to allow that to happen. And it&#8217;ll take us away from the old web. The old web will go down there. Everything&#8217;s built on dirt right? Do you see very much dirt in cities? No. No. The dirt is all concealed. It&#8217;s down there. If you want to go find it you can dig underneath the floor, and there&#8217;s dirt under there. But most people don&#8217;t spend very much time down there; we send professionals down there to put plumbing and pipes underneath and we experience the world like this.</strong></p>
<p><strong>Tish: </strong>I met Eric Horvitz (Microsoft Researcher) at <a href="http://en.oreilly.com/where2009/" target="_blank">Where 2.0</a>.Â  He is interested in community sensing and ideas about how people can share data in a win win way (<a href="http://en.oreilly.com/where2009/public/schedule/speaker/49828" target="_blank">see here</a>). Do we need to work out ways to make sure people&#8217;s relationship to their data is not just to have it harvested by others for profit or repression?</p>
<p><strong>Stowe: I&#8217;m interested in this actually. I recently wrote a piece about the governance of Twitter and for the purpose of your question let&#8217;s just go along with the premise that Twitter&#8217;s going to continue to be benevolent, and everything will be open, and everything will be public and everyone can do whatever they want with it. Well there&#8217;s a tremendous amount of things that people will want to do, but most of the things that they will set about doing to begin with will turn out to be irrelevant. </strong></p>
<p><strong>People will want to measure sentiment and all this other stuff, for example. And they&#8217;ll do that and they&#8217;ll coerce a lot of big brands and so on to pay money for these services. But the thing that&#8217;s going on with the now web, my web of flow is that people are disconnecting from self identity based on mass affiliations. So ultimately the more you spend your time doing this, you don&#8217;t give a s**t about brands. Nike &#8211; I could care less. </strong></p>
<p><strong>So there is defection from the mass media. We heard it today. There&#8217;s people here who were like booing these media guys, who think they should be held up as gods because, &#8220;Oh I&#8217;m one of the first to use Twitter on TV.&#8221; Well F*** you, I don&#8217;t give a s***. I don&#8217;t watch television. Every hour that people spend on the internet is an hour they do not spend watching television. It&#8217;s a direct and one to one correlation. Sure people still want to get their fill of whatever, the NBA playoffs, but significantly less than ever before. Which is why they&#8217;re increasingly irrelevant. </strong></p>
<p><strong>So the idea that some magicians are going to come along, figure out how to mine this data to find out how I feel about my automobile? I do not have a close personal relationship with an automobile. I don&#8217;t. And increasingly people won&#8217;t affiliate that way. They won&#8217;t bond with their stuff like that. That&#8217;s why I say most of this information won&#8217;t be helpful. It&#8217;ll be interesting sociologically. Webthropologists will be able to make it interesting &#8211; and marketing people, who are trying to figure what&#8217;s going on, might be able to do the right thing. But if they&#8217;re trying to take it and make it do something for them&#8230; They&#8217;re going to try to take it and use it to change us? To control us? It&#8217;s like that line in The Labyrinth,Â  &#8220;you don&#8217;t have any power over me anymore.&#8221;</strong></p>
<p><strong>Tish: </strong>You are actually saying something much more radical than say community sensing or that we need to store our own data. You seem to be saying that in some ways it doesn&#8217;t matter whether you store your own data or your data&#8217;s in the cloud (although Iran seems to be showing how centralized network control can be a powerful tool of repression).</p>
<p><strong>Stowe: Most of the things that they&#8217;re going to try to use it to do won&#8217;t work because we&#8217;re not the same anymore. It&#8217;s inevitable. </strong><strong>You use these tools, and you are changed. And it&#8217;s just a question of how long you use them and the longer you use them, the more you use them, the more changed you are. When people shift to a basis of sociality around connection with other people as opposed to mass affiliation, it&#8217;s different. It&#8217;s completely different. Your whole system of ethics, the way you judge the world and decide what&#8217;s important, is different. And not only different it&#8217;s better. It&#8217;s a better way to deal with the world.</strong><strong> And these guys are still hoping that the old rules hold, but they don&#8217;t. They just won&#8217;t.</strong></p>
<p><strong>Tish:</strong> This isÂ  rather a broad question. But one of the things that Kevin Slavin brought up in his talk is about things that tweet &#8211; your plant is tweeting, your shoes are tweeting, your house is tweeting. Twitter is a natural medium for the internet of things and what Kevin Slavin calls the &#8220;streamy aesthetics of sensors.&#8221; But with all these things that are tweeting people have had a lot of problems with filtering that kind of flood of tweets.Â  For example, I may want to listen to a tweet from my plant telling me it needs water when I am actually at home and can do something about it. But I may not want to listen to my plant whining about being thirsty all the time. Can microsyntax help? Or is this a place for those appliances you mentioned earlier?</p>
<p><strong>Stowe:Â  There&#8217;s a whole other category of stuff having to do with priorities &#8211; this isn&#8217;t really a microsyntax &#8211; of different times of day when you&#8217;re involved in different activities. You may be more or less interested in different collections of Twitter streams. And the notion of how you go about dealing with that is &#8211; it could semi-microsyntactical, but maybe it isn&#8217;t at all. Maybe it&#8217;s all just having to do with the way that clever client apps work. So maybe if you have a super duper Tweet Deck, and you say it&#8217;s evening time and I&#8217;m in my evening mode, so a whole bunch get blocked and a different group of people, for example, your Parcheesi evening friends get enabled, and at the weekend when you have time to do house care you listen to your house.</strong></p>
<p><strong>I don&#8217;t think this is a microsyntactical issue. I don&#8217;t think this is an issue of what&#8217;s embedded in the stream except as a notion of priorities. There&#8217;s a lot of people who would like to have a mechanism to indicate priority. But I can&#8217;t think of any effective way to do it that wouldn&#8217;t immediately be abused. Of course anything can be abused. This guy thinks that this is high priority, but maybe once again it&#8217;s one of these sort of mutual dimensions where they want to indicate it&#8217;s high priority but I say I only believe in priorities from certain people.</strong></p>
<p><strong> But still there might be a case to be made for allowing people to put some kind of indication of priority in a tweet, so that there is a hope that it could rise out of the clutter. I talked about some things that I&#8217;m interested in that are just purely operational. One of these things I want to get people to build, in Tweet Deck, but it could be in any kind of a client, I want to be able to say don&#8217;t let this tweet go away. So I&#8217;m getting them to build the pushpin. So I can put a pushpin in the thing and it&#8217;ll stay at the top, or stay at the bottom, wherever I put it. And then I can respond to it later, because if I don&#8217;t respond to it right now, in most places it goes bye, and then you&#8217;ve got to go search for it &#8211; a pain in the ass. </strong></p>
<p><strong>Then I say if I&#8217;m going to have pushpins I want to have a record of all the things that I&#8217;ve push pinned &#8211; a history of pushpins. But it&#8217;s all client based. It&#8217;s got nothing to do with what&#8217;s in the text. </strong></p>
<p><strong>Tish:</strong> And knowing how many of your followers had already got a particular tweet from somewhere else, which would be very useful, has to be done as an appliance&#8230;</p>
<p><strong>Stowe: Yes that&#8217;s sort of a downstream metrics kind of thing.</strong></p>
<p><strong>Microsyntax is not the answer to every kind of thing. Like, appropriately dealing with hash tags in a sensible fashion is not purely a function of how we use them. But some of it is the structure itself. That&#8217;s why I came up with the subtags model. So everybody at <a href="http://en.oreilly.com/where2009/public/schedule/speaker/49828" target="_blank">South by Southwest</a> tagged everything southbysouthwest, so if you searched for it there were 150,000 hits a day. So it was useless. But if people had used the subtags model, or something else like that, you could have searched for the subtag. So you could have searched for south-by-southwest.parties or south-by-southwest.thirtytwo-bit which was a particular party.</strong></p>
<p><strong> And so if you have sensible tools that are doing a better job of aggregating information around more complicated ways of structuring hash information, then we can get past the fact that brute force search just isn&#8217;t going to work. It just won&#8217;t work. For example somebody going through the stuff from today all the stuff that says #140conf but they want to find just the stuff that had to do with media, they wont be able to do it. They&#8217;ll have to do it manually. So some of that is better syntax. But some of it is better tools. I mean somebody should go build a better hashtags.org. </strong></p>
<p><strong>Tish: </strong>And in terms of creating a web of flow not all of what we need can been done within the Twitter messages &#8211; it has to be done in the client and external applications<strong>&#8230;<br />
</strong></p>
<p><strong>Stowe: Yes, there&#8217;s this class of applications that listen very diligently to what you&#8217;re doing in Twitter. The primary mechanism of how you influence the app is doing stuff in Twitter. You can always go to the app and look at it and fool with it. But, if in fact, the preponderance of your interaction is, it&#8217;s listening or talking to you in Twitter &#8211; I call that an appliance, to distinguish it from these other apps. Any external application might provide you with the mechanism to dump information into Twitter, but you have to go to the app to do the primary kinds of interaction. In fact major functionality may not be available at all in Twitter or maybe no functionality, except for like <a href="http://brightkite.com/" target="_blank">Brightkite</a> allows you to dump stuff into Twitter. But the idea is that primarily you do it there. Or there&#8217;s a very limited thing like you get with Brightkite, you can send a message saying, &#8220;I&#8217;m somewhere.&#8221;</strong></p>
<p><strong>Tish: </strong>Should location be put into tags?</p>
<p><strong>Stowe: I don&#8217;t think that location should be put into tags. In other words, if I talk about Paris, then using hashtags is sensible. Or I&#8217;m talking about Sherlock Holmes and his relationship to London. It&#8217;s a conceptual thing &#8211; like talking about Heaven. It doesn&#8217;t actually have to exist on the planet somewhere. But it&#8217;s really different if you say I am in New York City right now or the more interesting case I think really is, &#8220;I am going to be in Boston colon next week&#8221; or June 15 dash 17. And I want that information to be available to everybody or a select group of my friends, or just to myself and have it find its way into my calendar. But that&#8217;s really different than saying &#8220;I&#8217;ve always enjoyed it when I visit HASH New York.&#8221; </strong></p>
<p><strong>Tish:</strong> I liked Kevin Slavin&#8217;sÂ  phrase &#8220;the streamy aesthetics of sensors.&#8221; I guess streamy aesthetics is something you have given a lot of thought to?<strong><br />
</strong></p>
<p><strong>Stowe: First of all I read a lot of poetry, so I believe in poetics in reading and writing. But I don&#8217;t think punctuation marks really degrade that dramatically. I mean it&#8217;s OK to have periods and exclamation marks and commas, and things can still be poetic. I think it&#8217;s important to try to dream up microsyntax that doesn&#8217;t take your eyes off the content, the stuff that people are really trying to say. So that&#8217;s why for example I hate L: as a location queue because anything that has letters in it, if you&#8217;re not supposed to say them, &#8211; if you&#8217;re not mentally supposed to say them, or if you&#8217;re not supposed to say them if you read it aloud, causes you to do a stutter step when you&#8217;re reading the tweet. </strong></p>
<p><strong>But if you use punctuation marks, special characters at various points or placement conventions, like where do things appear in order in a tweet, those things don&#8217;t have the same toe stub, that I think really ugly syntactic conventions would. So it&#8217;s possible to make these things pretty. For example I&#8217;m testing out trying on various conventions for what do you do with a re-tweet. If you want to re-tweet it, if you actually want to have people see it, and then you want to make your own comment. So the question is how do you separate the two? So, RT &#8211; guy&#8217;s name and then text. Well then how do you know where his text ends and my text begins. So certain things don&#8217;t work for me. I mean like a comma is not enough because there might be a comma in the text. And a period doesn&#8217;t work because there might be multiple sentences. So it has to be something else.<br />
</strong></p>
<p><strong>Tish:</strong> And aren&#8217;t there confusions that arise because there are already conventions of usage&#8230;</p>
<p><strong>Stowe: Yes, I have problems with angle brackets, for example. Sometimes when the tweets wind up in not particularly smart rendering systems, it gets confused because it thinks they&#8217;re html. For example, somebody was using the open angle bracket, and even though it&#8217;s just text, and it&#8217;s not html, when I took that tweet and put it in a blog post, it thought it was the start of an html tag, and so it disappeared. You could use an html escape character but that&#8217;s the kind of thing that causes problems. The other problem is there are other ways that it&#8217;s been used a lot. People have used this as the thing to introduce the comment that they&#8217;re making after a re-tweet.</strong></p>
<p><strong>Tish: </strong>There must be very few characters not being used for other things?<strong><br />
</strong></p>
<p><strong>Stowe: Yes but for example, when we use geoslashes there&#8217;s a blank in front of it, or it&#8217;s the first character in the tweet &#8211; so in that particular example it is similar because slash is used for other things. </strong><strong> But, in all the places where it is used, generally there&#8217;s a character that precedes it &#8211; like &#8220;w/o&#8221; for without or a fraction or a long list of these options. </strong><strong> </strong><strong>[</strong>Geoslash is microsyntax for user location using slash (&#8216;/&#8217;) &#8212; as in &#8216;just arrived /SFO&#8217; or &#8216;heading to /New York: tomorrow/&#8217; for more see <a href="http://microsyntax.pbworks.com/Geoslash" target="_blank">Stowe&#8217;s post here</a>.]</p>
<p><strong>When I was rooting around for a character I looked for a long time.Â  And also I wanted to make sure that the slash was easily reachable on cell phones, which, for example, angle bracket isn&#8217;t. So if you&#8217;re on a phone and you want to say I&#8217;m here &#8211; I don&#8217;t know how far you have to go on your phone, but it isn&#8217;t in the first eight characters of Symbian. I looked carefully to make sure it wasn&#8217;t a common character that people use widely in everyday speech like commas and semicolons and exclamation marks, but was still easily used. There are still other alternatives. It&#8217;s not the only one. There are cases to be made for all of these things &#8211; pros and cons for all of them.</strong></p>
<p><strong><br />
Anyway I was making the case of experimenting with different things for this re-tweet, &#8220;Here&#8217;s my comment.&#8221; And I was trying all sorts of stuff like double colon, I tried all kinds of things I wanted to see what it looked like. So starting this week I used the solid bar, the upright bar. It sets it off. It really feels like there&#8217;s a divide. There&#8217;s a cleavage point, and that&#8217;s that guy and this is this guy. So I&#8217;m going to write it up as one of the candidates. Some people use square brackets and many other things. There are many personal conventions but nothing has become a real convention, accepted as the norm.</strong></p>
<p><strong>[ </strong>Note: Our conversation ended here as the presentations had resumed at <a href="http://www.140conf.com/" target="_blank">140 Characters Conference</a> ]</p>
<p><strong><br />
</strong></p>
]]></content:encoded>
			<wfw:commentRss>https://www.ugotrade.com/2009/06/28/twitter-and-the-web-of-flow-talking-with-stowe-boyd-bruce-sterling-about-microsyntax-squelettes-favela-chic-and-the-state-of-now/feed/</wfw:commentRss>
		<slash:comments>4</slash:comments>
		</item>
		<item>
		<title>Sensor Networks and Sustainability: &#8220;Connecting Real, Virtual, Mobile and Augmented Spaces&#8221;</title>
		<link>https://www.ugotrade.com/2009/04/19/sensor-networks-and-sustainability-connecting-real-virtual-mobile-and-augmented-reality/</link>
		<comments>https://www.ugotrade.com/2009/04/19/sensor-networks-and-sustainability-connecting-real-virtual-mobile-and-augmented-reality/#comments</comments>
		<pubDate>Sun, 19 Apr 2009 06:32:59 +0000</pubDate>
		<dc:creator><![CDATA[Tish Shute]]></dc:creator>
				<category><![CDATA[3D internet]]></category>
		<category><![CDATA[Ambient Devices]]></category>
		<category><![CDATA[Ambient Displays]]></category>
		<category><![CDATA[architecture of participation]]></category>
		<category><![CDATA[Augmented Reality]]></category>
		<category><![CDATA[Carbon Footprint Reduction]]></category>
		<category><![CDATA[CurrentCost]]></category>
		<category><![CDATA[Ecological Intelligence]]></category>
		<category><![CDATA[Energy Awareness]]></category>
		<category><![CDATA[Energy Saving]]></category>
		<category><![CDATA[home automation]]></category>
		<category><![CDATA[home energy monitoring]]></category>
		<category><![CDATA[home energy monitors]]></category>
		<category><![CDATA[HomeCamp]]></category>
		<category><![CDATA[Instrumenting the World]]></category>
		<category><![CDATA[internet of things]]></category>
		<category><![CDATA[message brokers and sensors]]></category>
		<category><![CDATA[Mixed Reality]]></category>
		<category><![CDATA[mobile meets social]]></category>
		<category><![CDATA[MQTT and RSMB]]></category>
		<category><![CDATA[new urbanism]]></category>
		<category><![CDATA[OpenSim]]></category>
		<category><![CDATA[Paticipatory Culture]]></category>
		<category><![CDATA[realXtend]]></category>
		<category><![CDATA[smart appliances]]></category>
		<category><![CDATA[Smart Devices]]></category>
		<category><![CDATA[Smart Planet]]></category>
		<category><![CDATA[sustainable living]]></category>
		<category><![CDATA[sustainable mobility]]></category>
		<category><![CDATA[ubiquitous computing]]></category>
		<category><![CDATA[Virtual HomeCamp]]></category>
		<category><![CDATA[Virtual Meters]]></category>
		<category><![CDATA[Virtual Realities]]></category>
		<category><![CDATA[Web Meets World]]></category>
		<category><![CDATA[AMEE]]></category>
		<category><![CDATA[arduino]]></category>
		<category><![CDATA[Carbon Goggles]]></category>
		<category><![CDATA[distributed sustainability]]></category>
		<category><![CDATA[home energy management]]></category>
		<category><![CDATA[open data]]></category>
		<category><![CDATA[Pachube]]></category>
		<category><![CDATA[sensor networks]]></category>
		<category><![CDATA[sensor networks and sustainability]]></category>
		<category><![CDATA[SHASPA]]></category>
		<category><![CDATA[the internet of things]]></category>
		<category><![CDATA[TweetaWatt]]></category>
		<category><![CDATA[Virtual Worlds]]></category>

		<guid isPermaLink="false">http://www.ugotrade.com/?p=3381</guid>
		<description><![CDATA[Today, I did a presentation, on connecting real, virtual, mobile, and augmented spaces to support sustainability, for Earth Week SL, with Dave Pentecost and Jim Purbrick, who presented on Carbon Goggles. Dave and I focused on sensor networks, open data, Pachube, OpenSim, and sustainability from perspective of, &#8220;hack local, think global.&#8221;Â  Dave and I will [&#8230;]]]></description>
				<content:encoded><![CDATA[<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/04/picture-21.png"><img class="alignnone size-medium wp-image-3382" title="picture-21" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/04/picture-21-300x225.png" alt="picture-21" width="300" height="225" /></a></p>
<p>Today, I did a presentation, on <a href="http://docs.google.com/Presentation?id=dhj5mk2g_214g48q37hj" target="_blank">connecting real, virtual, mobile, and augmented spaces to support sustainability,</a> for <a href="http://slearthweek.wordpress.com/2009/04/10/earth-week-press-release-see-schedule-also/" target="_blank">Earth Week SL</a>, with <a href="http://www.gomaya.com/glyph/" target="_blank">Dave Pentecost</a> and <a href="http://jimpurbrick.com/" target="_blank">Jim Purbrick</a>, who presented on <a href="http://carbongoggles.org/" target="_blank">Carbon Goggles</a>.</p>
<p>Dave and I focused on sensor networks, open data, <a href="http://www.pachube.com/" target="_blank">Pachube</a>, <a href="http://opensimulator.org/wiki/Main_Page" target="_blank">OpenSim,</a> and sustainability from the perspective of, &#8220;hack local, think global.&#8221; Dave and I will be picking up on some of these themes of sensor networks and sustainability next week in our presentation with <a href="http://www.darleon.com/" target="_blank">Dimitri Darras</a> at ITP, NYU, April 24th, 6.30 pm to 8 pm &#8211; <a href="http://itp.nyu.edu/sigs/news/special-event-open-sim/" target="_blank">details here</a>. If you are in New York City, I hope to see you there.</p>
<p>We got some interesting insights into augmented reality from <a href="http://jimpurbrick.com/" target="_blank">Jim Purbrick</a> whose <a href="http://carbongoggles.org/" target="_blank">Carbon Goggles</a> project prototypes how we can use augmented reality to read carbon identity and to combine well organized, verified data from <a href="http://www.amee.com/" target="_blank">AMEE</a> &#8211; a neutral aggregation platform to measure the &#8220;carbon footprint&#8221; of everything on earth, with crowd sourced tagging and linking.</p>
<h3>Shaspa &#8211; &#8220;the sensor network system that has it all&#8221;</h3>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/04/picture-22.png"><img class="alignnone size-medium wp-image-3391" title="picture-22" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/04/picture-22-300x224.png" alt="picture-22" width="300" height="224" /></a></p>
<p>We also discussed, recently launched, <a href="http://www.shaspa.com/" target="_blank">Shaspa</a>. Shaspa&#8217;s energy management packages connect spaces &#8211; real, virtual, mobile and augmented. Shaspa has been blogged by <a href="http://www.maxping.org/business/real-life/virtual-management-of-energy-consumption-in-the-home.aspx/" target="_blank">Maxping</a> and <a href="http://www.virtualworldsnews.com/2009/04/shaspa-launches-home-energy-organizer-on-opensim.html" target="_blank">Virtual World News</a>, so you can read all about it, but the Shaspa device kit won&#8217;t be available until next week. Some key features of the Home Energy package are listed on the slide above. However, this evening, Dave Pentecost and I got a sneak preview of both the Shaspa community and enterprise hardware and software packages from Shaspa founder Oliver Goh. We were pretty impressed.</p>
<p><strong>Dave:</strong> &#8220;<strong>It&#8217;s the ultimate hackable device for energy management!&#8221;</strong></p>
<p><strong>Oliver:</strong> <strong>&#8220;Bring us any sensor device &#8211; with documentation, and within three days we will put a driver into Shaspa.&#8221;</strong></p>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/04/daveandoliverpost.jpg"><img class="alignnone size-medium wp-image-3392" title="daveandoliverpost" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/04/daveandoliverpost-300x178.jpg" alt="daveandoliverpost" width="300" height="178" /></a></p>
<p>Oliver is on the right and Dave on the left in the picture above. The picture below shows Shaspa in OpenSim. Oliver and I will be attending the <a href="http://www.3dtlc.com/"><span style="color: #810081;">3D Training, Learning and Collaboration</span></a> Conference in Washington, DC, next week.</p>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/04/picture-23.png"><img class="alignnone size-medium wp-image-3412" title="picture-23" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/04/picture-23-300x208.png" alt="picture-23" width="300" height="208" /></a></p>
<h3>Links</h3>
<p>Here are some of the links that came up in the presentation as many people asked for them to be published. Dave also has them on <a href="http://www.gomaya.com/glyph/archives/002520.html#002520" target="_blank">his blog</a>.</p>
<p>SLIDES on GOOGLE DOCS:<br />
<a title="Earth Week SL Presentation, April 18th, 2009 - Google Docs" href="http://docs.google.com/Presentation?id=dhj5mk2g_214g48q37hj">Earth Week SL Presentation, April 18th, 2009 &#8211; Google Docs</a></p>
<p><a href="http://www.ugotrade.com/2009/01/28/pachube-patching-the-planet-interview-with-usman-haque/" target="_blank">Pachube, sensor networks</a></p>
<p><a href="http://www.gomaya.com/glyph" target="_blank">Dave&#8217;s blog covering Maya archaeology, jungle ecology, and technology</a></p>
<p><a href="http://www.gomaya.com/glyph/archives/001914.html" target="_blank">Maya Frontier, Usumacinta River videos</a></p>
<p><a href="http://en.wikipedia.org/wiki/Collapse_(book)" target="_blank">Collapse</a></p>
<p><a href="http://arduino.cc/" target="_blank">Arduino</a></p>
<p><a href="http://community.pachube.com/tutorials" target="_blank">Pachube &#8211; tutorials</a></p>
<p><a href="http://apps.pachube.com/" target="_blank">Pachube Apps </a>-</p>
<p><a href="http://www.pachube.com/feeds/1284" target="_blank">Arduino-SL-Pachube data site</a></p>
<p><a href="http://www.pachube.com/feeds/1505" target="_blank">SL to Pachube site</a></p>
<p><a href="http://www.zachhoeken.com/connecting-to-the-world" target="_blank">Dave&#8217;s Danger Shield &#8211; Pachube  tutorial</a></p>
<p><a href="http://www.ladyada.net/make/tweetawatt/" target="_blank">TweetaWatt site (LadyAda)</a></p>
<p><a href="http://www.gomaya.com/glyph/archives/002505.html" target="_blank">Dave&#8217;s post on TweetaWatt to Opensim/SL</a></p>
<p><a href="http://peterquirk.wordpress.com/2008/12/22/tutorial-using-the-streamlined-tool-chain-for-importing-sketchup-models-into-realxtend-04/" target="_blank">Peter Quirk&#8217;s post on Importing Sketchup into RealXtend</a></p>
<p><a href="http://opensimulator.org/wiki/Main_Page" target="_blank">Opensim</a></p>
<p><a href="http://www.realxtend.org/" target="_blank">RealXtend</a></p>
<p><a href="http://reactiongrid.com/" target="_blank">ReactionGrid</a></p>
<p><a href="http://homecamp.pbwiki.com/" target="_blank">homecamp</a></p>
<p><a href="http://www.cminion.com/wordpress/" target="_blank">cminion -wind turbines in OpenSim</a></p>
<p><a href="http://mikethebee.mevio.com/" target="_blank">MiketheBee</a></p>
<p><a href="http://www.ugotrade.com/2009/01/17/is-it-%E2%80%9Comg-finally%E2%80%9D-for-augmented-reality-interview-with-robert-rice/" target="_blank">Is it &#8220;OMG finally&#8221; for Augmented Reality?</a></p>
<p><a href="http://www.ugotrade.com/2008/12/15/smart-planetinterview-with-andy-stanford-clark/" target="_blank">Smart Planet: Interview with Andy Stanford-Clark</a></p>
<p><a href="http://www.orangecone.com/" target="_blank">Orange Cone &#8211; Information Shadows and Things as Services</a></p>
]]></content:encoded>
			<wfw:commentRss>https://www.ugotrade.com/2009/04/19/sensor-networks-and-sustainability-connecting-real-virtual-mobile-and-augmented-reality/feed/</wfw:commentRss>
		<slash:comments>2</slash:comments>
		</item>
		<item>
		<title>Dematerializing the World, Shadows, Subscriptions and Things as Services: Talking With Mike Kuniavsky at ETech 2009</title>
		<link>https://www.ugotrade.com/2009/03/18/dematerializing-the-world-shadows-subscriptions-and-things-as-services-talking-with-mike-kuniavsky-at-etech-2009/</link>
		<comments>https://www.ugotrade.com/2009/03/18/dematerializing-the-world-shadows-subscriptions-and-things-as-services-talking-with-mike-kuniavsky-at-etech-2009/#comments</comments>
		<pubDate>Thu, 19 Mar 2009 03:16:11 +0000</pubDate>
		<dc:creator><![CDATA[Tish Shute]]></dc:creator>
				<category><![CDATA[Ambient Devices]]></category>
		<category><![CDATA[Ambient Displays]]></category>
		<category><![CDATA[Carbon Footprint Reduction]]></category>
		<category><![CDATA[digital public space]]></category>
		<category><![CDATA[Ecological Intelligence]]></category>
		<category><![CDATA[Energy Awareness]]></category>
		<category><![CDATA[Energy Saving]]></category>
		<category><![CDATA[home automation]]></category>
		<category><![CDATA[home energy monitoring]]></category>
		<category><![CDATA[home energy monitors]]></category>
		<category><![CDATA[Instrumenting the World]]></category>
		<category><![CDATA[internet of things]]></category>
		<category><![CDATA[mobile meets social]]></category>
		<category><![CDATA[new urbanism]]></category>
		<category><![CDATA[smart appliances]]></category>
		<category><![CDATA[Smart Devices]]></category>
		<category><![CDATA[Smart Planet]]></category>
		<category><![CDATA[sustainable living]]></category>
		<category><![CDATA[sustainable mobility]]></category>
		<category><![CDATA[ubiquitous computing]]></category>
		<category><![CDATA[Web 2.0]]></category>
		<category><![CDATA[Web Meets World]]></category>
		<category><![CDATA[World 2.0]]></category>
		<category><![CDATA[#etech]]></category>
		<category><![CDATA[Aaaron Straup Cope]]></category>
		<category><![CDATA[Adam Greenfield]]></category>
		<category><![CDATA[Ambient Orb]]></category>
		<category><![CDATA[AMEE]]></category>
		<category><![CDATA[BlinkM]]></category>
		<category><![CDATA[Bocci at ETech]]></category>
		<category><![CDATA[Bruce Sterling]]></category>
		<category><![CDATA[data shadows]]></category>
		<category><![CDATA[dematerializing products]]></category>
		<category><![CDATA[dematerializing the world]]></category>
		<category><![CDATA[dressing the shadows]]></category>
		<category><![CDATA[ecology of services]]></category>
		<category><![CDATA[econolypse]]></category>
		<category><![CDATA[embodied energy data]]></category>
		<category><![CDATA[energy identity]]></category>
		<category><![CDATA[Etech 2009]]></category>
		<category><![CDATA[Gavin Starks]]></category>
		<category><![CDATA[green technology]]></category>
		<category><![CDATA[information shadows]]></category>
		<category><![CDATA[item level identification]]></category>
		<category><![CDATA[LilyPad]]></category>
		<category><![CDATA[LoveM]]></category>
		<category><![CDATA[Maker culture]]></category>
		<category><![CDATA[Makershed]]></category>
		<category><![CDATA[Mike Kuniavsky]]></category>
		<category><![CDATA[Moore's Law]]></category>
		<category><![CDATA[Pachube]]></category>
		<category><![CDATA[Path Intelligence]]></category>
		<category><![CDATA[RFID tracking]]></category>
		<category><![CDATA[servicization of things]]></category>
		<category><![CDATA[smart LED]]></category>
		<category><![CDATA[spimes]]></category>
		<category><![CDATA[Stamen Design]]></category>
		<category><![CDATA[Steven Levy]]></category>
		<category><![CDATA[sustainable design]]></category>
		<category><![CDATA[the dotted line world]]></category>
		<category><![CDATA[the internet of things]]></category>
		<category><![CDATA[the shape of alpha]]></category>
		<category><![CDATA[Thinglink project]]></category>
		<category><![CDATA[ThingM]]></category>
		<category><![CDATA[things as services]]></category>
		<category><![CDATA[Tim O'Reilly]]></category>
		<category><![CDATA[ubicomp]]></category>
		<category><![CDATA[ubicomp hardware]]></category>
		<category><![CDATA[urban green space]]></category>
		<category><![CDATA[Usman Haque]]></category>
		<category><![CDATA[Wattzon]]></category>
		<category><![CDATA[WineM]]></category>
		<category><![CDATA[wireless networks]]></category>

		<guid isPermaLink="false">http://www.ugotrade.com/?p=3191</guid>
		<description><![CDATA[ETech 2009 was all about making interesting and deeply socially effective technological interventions in the world. And dematerializing products into services seemed to be one of the most powerful concepts elaborated there to accomplish this. Mike Kuniavsky in his presentation, &#8220;The dotted-line world, shadows, services, subscriptions,&#8221; noted: &#8220;There&#8217;s great opportunity here to create an ecology [&#8230;]]]></description>
				<content:encoded><![CDATA[<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/03/bicycleriderdatashadows.jpg"><img class="alignnone size-medium wp-image-3192" title="bicycleriderdatashadows" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/03/bicycleriderdatashadows-300x230.jpg" alt="bicycleriderdatashadows" width="300" height="230" /></a></p>
<p><a href="http://en.oreilly.com/et2009" target="_blank">ETech 2009</a> was all about making interesting and deeply socially effective technological interventions in the world. And dematerializing products into services seemed to be one of the most powerful concepts elaborated there to accomplish this. Mike Kuniavsky in his presentation, <a href="http://en.oreilly.com/et2009/public/schedule/speaker/1947" target="_blank"><strong>&#8220;The dotted-line world, shadows, services, subscriptions,&#8221;</strong></a> noted:</p>
<p><strong>&#8220;There&#8217;s great opportunity here to create an ecology of services embodied as robust, valuable, exciting new tools with focused, limited functionality, tied together with item-level identification and wireless networks. Whole classes of things that can enrich our lives and bank accounts are now possible thanks to the way ubiquitous computing interweaves services and devices at an intimate, everyday level&#8230;.<br />
</strong><br />
<strong>We now have the technology to create whole new classes of tools for living in a way that is more useful and fun for individuals, more sustainable for society, and more profitable for companies. That way is to recognize the connectedness of all everyday things, and to build on it, rather than ignoring it.&#8221;</strong></p>
<p>The picture opening this post is from Mike&#8217;s presentation (see <a id="zuqd" title="Mike's blog" href="http://www.orangecone.com/archives/2009/03/etech_2009_the.html">Mike&#8217;s blog</a> for <a href="http://www.orangecone.com/tm_etech_2009_0.1.pdf">a PDF with all of the images and notes</a> (884 PDF), and the original presentation description).</p>
<p>An ecosystem using item-level identification, wireless networking, and data visualization is evolving that links everyday objects to information about those objects &#8211; what Kuniavsky calls their &#8220;information shadow.&#8221; Because every object can be uniquely identified and that identification can be associated with a cluster of metadata, it &#8220;exists simultaneously in the physical world and in the world of data.&#8221;</p>
<p>Mike mentioned Tom Coates&#8217; <a href="http://www.plasticbag.org/archives/2005/04/the_age_of_pointatthings/" target="_blank">&#8220;Age of Point-At Things&#8221;</a> blog post to say that although Tom was talking about TV listings data, the same ideas can be applied to anything that&#8217;s uniquely identified. Also, Mike noted, he often references Ulla-Maaria Mutanen&#8217;s <a href=" http://aula.org/people/ulla/thinglink_white_paper.pdf" target="_blank">Thinglink project</a> and her observation about Amazon ASINs to explain this concept which is, of course, closely related to <a href=" http://en.wikipedia.org/wiki/Internet_of_things" target="_blank">the internet of things.</a></p>
<p>Until recently, Mike explained, accessing the information shadow was difficult. The world of objects and the world of information shadows were separated by the difficulty of getting at the information. But now, increasingly:</p>
<p><strong>&#8220;we can instantaneously see the world of information shadows as we&#8217;re interacting with the world of objects.&#8221; </strong></p>
<p>Mike is not only conceptualizing these ideas, his company with partner Tod E. Kurt, <a id="zh2z" title="Thingm" href="http://thingm.com/" target="_blank">Thing<span class="ru_CC6D50_bk">M,</span></a> is producing hardware that will enable this vision.</p>
<p><strong>&#8220;We&#8217;re a ubiquitous computing consumer electronics company, which sounds fancy, but weâ€™re pretty small. We design, manufacture and sell ubicomp hardware.&#8221;</strong></p>
<p>ThingM may be small now but they are at the leading edge of a huge transformation. When asked, &#8220;How do you see the near-future city working with ubiquitous computing&#8230;&#8221; Adam Greenfield put it succinctly to Lalie Nicolas for <a href="http://www.lehub-agence.com/site.php">Le Hub</a>&#8217;s <a href="http://www.ludigo.net/index.php?rub=0">Ludigo</a> project:</p>
<p><strong>&#8220;I would go so far as to say that there will be no area or domain of urban activity that is not somehow disassembled and recomposed as a digital, networked, interactive process over the next few years. Objects, buildings and spaces will be reconceived as network resources; cars, subways and bicycles will be reimagined as on-demand mobility services; human communities are already well on the way to becoming self-conscious &#8216;social networks.&#8217;&#8221;</strong></p>
<p>For the rest of this short interview <a href="http://speedbird.wordpress.com/2009/03/16/ludigo-interview/" target="_blank">see Adam&#8217;s post</a>, and for my recent long interview with Adam <a href="http://www.ugotrade.com/2009/02/27/towards-a-newer-urbanism-talking-cities-networks-and-publics-with-adam-greenfield/" target="_blank">see here</a>.</p>
<h3>&#8220;&#8216;Almost everything in this room is in a landfill, but just doesn&#8217;t know it yet.&#8217;Â  This needs to change&#8221;</h3>
<p>(Tim O&#8217;Reilly responding on Twitter to a quote from <a href="http://twitter.com/AlexSteffen" target="_blank">@AlexSteffen</a>&#8216;s talk)</p>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/03/picture-5.png"><img class="alignnone size-medium wp-image-3194" title="picture-5" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/03/picture-5-300x241.png" alt="picture-5" width="300" height="241" /></a></p>
<p><em><span class="caps">Chart above from Jeremy Faludi&#8217;s presentation</span> <a class="attach" href="http://assets.en.oreilly.com/1/event/20/Priorities%20for%20a%20Greener%20World_%20If%20You%20Could%20Design%20Anything,%20What%20Should%20You%20Do_%20Presentation.pdf">Priorities for a Greener World: If You Could Design Anything, What Should You Do? Presentation</a> <span class="en_filetype">[PDF]</span></em> <span class="caps"> </span></p>
<p>Interconnecting themes at ETech, <a id="nn8n" title="Inhabitat notes" href="http://www.inhabitat.com/2009/03/13/the-best-of-green-at-etech-2009/" target="_blank">Inhabitat noted,</a> &#8220;formed bridges between luminary speakers from a variety of backgrounds, as <a href="http://www.inhabitat.com/2006/10/26/worldchanging-the-book-is-out/">Alex Steffen</a>, <a href="http://www.inhabitat.com/2008/02/20/mary-lou-jepsen-at-greener-gadgets/">Mary Lou Jepsen</a>, <a href="http://www.faludidesign.com/">Jeremy Faludi</a>, and others reinforced the need to create repairable, open-source, <a href="http://www.inhabitat.com/2009/03/02/greener-gadgets-2009/">long lasting products</a>, reveal energy usage, and pursue forward-thinking strategies for a greener tomorrow.&#8221; But <a href="http://www.faludidesign.com/" target="_blank">Jeremy Faludi</a>, a sustainable design strategist and researcher<span class="caps">, </span><span class="caps">put the design challenge most directly:</span></p>
<p><span class="caps"> <strong>&#8220;</strong></span><strong>If you really care you need to dematerialize, turn products into services&#8230;&#8221; </strong></p>
<p>The idea of data shadows has been a part of the conversation in ubiquitous computing for a long time (since Marshall McLuhan perhaps?). But, at ETech 2009, it seemed to have come of age.</p>
<p>It came up again and again, in the need to dematerialize stuff that seemed to be part of every conversation, from Faludi&#8217;s comments on the amount of toxic mining waste created in the manufacture of one laptop, to Raffi Krikorian&#8217;s presentation of <a href="http://www.wattzon.com/" target="_blank">Wattzon&#8217;s</a> Embodied Energy Database (<a href="http://www.slideshare.net/raffikrikorian/wattzon-etech-2009" target="_blank">see slides here</a>), and <a id="lnyt" title="AMEE" href="http://www.amee.com/" target="_blank">AMEE</a> founder, Gavin Stark&#8217;s presentation, <a name="session7799"></a> (also see <a href="http://www.amee.com/blog/2009/03/19/energy-identity/">Gavin&#8217;s blog on Energy Identity here</a>).</p>
<p>The path to dematerializing the burdensome stuff that spells doom for our environment was not only presented conceptually and in creative solutions to specific problems (e.g. ThingM) at ETech. There were also hands on workshops (see <a href="http://www.ugotrade.com/2009/03/10/making-a-rfid-to-web-interface-and-lilypad-electronic-fashion-at-etech-2009/" target="_blank">my post on the two I attended</a>) from Maker gurus, who were also often to be found in the <a href="http://en.oreilly.com/et2009/public/schedule/detail/7281" target="_blank">Makershed</a>, providing opportunities to experiment with and prototype your own solutions (my hat is off to <a href="http://en.oreilly.com/et2009/public/content/about" target="_blank">Brady Forrest and the ETech committee</a> for pulling all this together).</p>
<h3>Connecting the dots&#8230;</h3>
<p>In the wake of an &#8220;econolypse,&#8221; (neologism pulled from Bruce Sterling&#8217;s twitter feed -Â  @bruces) and on the eve of environmental catastrophe, we may well have, as Adam Greenfield <a href="http://www.ugotrade.com/2009/02/27/towards-a-newer-urbanism-talking-cities-networks-and-publics-with-adam-greenfield/" target="_blank">said to me here</a>, &#8220;seriously screwed the pooch.&#8221;</p>
<p>But that does not mean we should not do everything we can to try to save the day.</p>
<p>And in the serendipity peculiar to a conference, I was talking in the corridor to Gavin Starks of <a id="lnyt" title="AMEE" href="http://www.amee.com/" target="_blank">AMEE</a> who is working to create &#8220;the world&#8217;s energy meter&#8221; (on the right in the picture below), and Tony Mak from <a id="hc7p" title="O'Reilly AlphaTech Ventures" href="http://www.oatv.com/" target="_blank">O&#8217;Reilly AlphaTech Ventures</a> (to Gavin&#8217;s right), and Usman Haque of <a id="vp25" title="Pachube" href="http://www.pachube.com/">Pachube</a> (on Tony&#8217;s right) <a id="ihta" title="-see my earlier interview here" href="../../2009/01/28/pachube-patching-the-planet-interview-with-usman-haque/" target="_blank">- see my earlier interview with Usman here</a>), when Tim O&#8217;Reilly (far left) came by with Steven Levy of Wired (to Tim&#8217;s left). More on <a id="vp25" title="Pachube" href="http://www.pachube.com/">Pachube</a>, <a id="vwro" title="WattzOn" href="http://www.wattzon.com/" target="_blank">WattzOn</a>, <a id="lnyt" title="AMEE" href="http://www.amee.com/" target="_blank">AMEE</a> and <a href="http://www.pathintelligence.com/" target="_blank">Path Intelligence</a> and how these projects may connect in an upcoming post. Path Intelligence like AMEE is funded by the O&#8217;Reilly Venture group.</p>
<p>And no sooner had I snapped the photo below, Mike Kuniavsky arrived.</p>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/03/dhj5mk2g_170dxf8g9hg_b.jpg"></a><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/03/timoreillytalkingtogavinstarkspost2.jpg"><img class="alignnone size-medium wp-image-3276" title="timoreillytalkingtogavinstarkspost2" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/03/timoreillytalkingtogavinstarkspost2-300x180.jpg" alt="timoreillytalkingtogavinstarkspost2" width="300" height="180" /></a></p>
<p>It seemed such an historic meeting, I asked everyone if I could switch my recorder on.</p>
<p>Tim had just been explaining how the concept of &#8220;data shadows&#8221; fit with something he&#8217;d learned from Gavin in a breakfast conversation. Gavin was talking about what AMEE is learning from smart meter data collected from 1.2 million homes in the UK. The energy signature from each device is so unique that you can tell not only the make and model of major appliances in each home, but its age. Gavin is worried about the privacy implications (as we all should be), but nonetheless, you can see the implications for business. Tim framed a vital question:<strong> What new businesses are growing in the data shadows?</strong></p>
<p><strong>Tim O&#8217;Reilly: </strong>Here&#8217;s the other member of this conversation I was trying to broker. This is Mike Kuniavsky, Gavin Starks. I was talking in your session about the point he made in his session&#8230;Steve Levy from Wired&#8230;</p>
<p><strong>Tish Shute:</strong> sorry, could you recap the point?</p>
<p><strong>Tim O&#8217;Reilly:</strong> &#8230;just the idea about data shadows, I just think it&#8217;s just such a powerful metaphor that every .. and you went on to explain that potential for subscriptions and so on&#8230;</p>
<p><strong>Mike Kuniavsky:</strong> Yes well what I was saying was that essentially every object that has an identifier associated with it, and there are a number of different kinds of identifiers out there, simultaneously lives in kind of the world of physical objects, and of the world of data. And the identifier links those two.</p>
<p><strong>Steven Levy:</strong> Just like Sterling&#8217;s Spimes?</p>
<p><strong>Mike Kuniavsky:</strong> A spime, it&#8217;s related obviously because we&#8217;re talking about RFIDs, but I&#8217;m really specifically talking about the fact that there is this information shadow that exists out there.</p>
<p><strong>Tim O&#8217;Reilly:</strong> I think we&#8217;ll find it lots of different ways, that was my excitement in connecting these points.</p>
<p><strong>Gavin Starks:</strong> My take on it is energy identity &#8211; that everything and everybody ends up with an energy identity that is the embodiment of their physical consumption.</p>
<p><strong>Mike Kuniavsky:</strong> And I would say, not to argue, I would say that energy comes as part of my information shadow. Like I carry this baggage of data along with me. And whatever data is potentially appropriate can be glommed on to that. And then that can then be carried to something else that can manipulate it. And also that&#8217;s true about every object. And now that we have RFID tracking of individual objects, it&#8217;s true about literally every object, not just every class of objects.</p>
<p><strong>Usman Haque:</strong> There&#8217;s a really beautiful story by Julio Cortazar where he uses the phrase &#8220;dressing the shadows&#8221; and it&#8217;s about the idea the shadow is not this sort of flat black thing but we can sort of put things onto it and slowly sort of grow it into something. It&#8217;s actually sort of more of a love story. But it&#8217;s a really interesting idea that the shadow&#8217;s not just the absence of but that it&#8217;s kind of the important part of it [for more see Usman&#8217;s paper, <a href="http://www.haque.co.uk/papers/dressingshadowsofarch.pdf" target="_blank">Dressing the shadows of architecture</a> &#8211; which is also available in spanish <a href="http://www.tintank.es/articulo_vestirsombras.html" target="_blank">here</a>.]</p>
<p><strong>Mike Kuniavsky:</strong> It&#8217;s the Peter Pan Barrie [JM Barrie, the author] thing. When Peter Pan&#8217;s shadow gets cut off and Wendy has to resew it back on. Potentially what all of these item level identification technologies are doing is they&#8217;re sewing the shadow back to the objects that they came from. And so you&#8217;re getting the information.</p>
<p><strong>Gavin Starks:</strong> It&#8217;s like the two and a half kilo Macbook which has a 460 kilo carbon shadow.</p>
<p><strong>Tim O&#8217;Reilly:</strong> It&#8217;s just a very powerful concept. That&#8217;s all I&#8217;m saying. I think it&#8217;s a metaphor that as soon as you have it, it makes it very easy to understand and to see a whole lot of things. So I&#8217;m very fond of it. Already it&#8217;s my new favorite toy. And it is great running into you all in the same place in the hall so I could introduce you all.</p>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/03/dhj5mk2g_173c5f8nvcm_b.png"><img class="alignnone size-medium wp-image-3203" title="dhj5mk2g_173c5f8nvcm_b" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/03/dhj5mk2g_173c5f8nvcm_b-300x231.png" alt="dhj5mk2g_173c5f8nvcm_b" width="300" height="231" /></a></p>
<p><em>Image from Mike&#8217;s ETech presentation</em><br />
<strong><br />
&#8220;To create these new experiences we need to think about the design of both digital devices and infrastructures differently. We need to step back from standalone tools and think about what service those tools deliver, then construct new avatars that fit better into people&#8217;s everyday experiences. We also need to step back from our infrastructural products and think about what services they enable. The electrical grid did not first start out as an abstract electrical grid in South Manhattan; it started as a way to deliver electric light. The electric bulb was not a standalone device, it was an avatar of Edison&#8217;s light delivery service and it was, first and foremost, designed to solve a specific problem for a large consumer market. Only then did the infrastructure it created expand to solve other kinds of problems.&#8221; Mike Kuniavsky&#8217;s ETech presentation, 2009</strong>
<p><strong><br />
</strong></p>
<h3><strong>Talking With Mike Kuniavsky</strong></h3>
<p><strong> </strong></p>
<p><strong><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/03/elizabethandmikeballpost.jpg"></a><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/03/elizabethgoodmanandmikekuniavskyballpost.jpg"><img class="alignnone size-medium wp-image-3280" title="elizabethgoodmanandmikekuniavskyballpost" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/03/elizabethgoodmanandmikekuniavskyballpost-300x199.jpg" alt="elizabethgoodmanandmikekuniavskyballpost" width="300" height="199" /></a><br />
</strong></p>
<p><em>Mike Kuniavsky and Elizabeth Goodman playing Bocci after ETech</em></p>
<p>The conversation with Mike began with a discussion about how to encourage participation. Usman Haque was present but he was called to lunch shortly. The question of encouraging participation in deep social change was another recurring theme at ETech. And, as Mike noted in his presentation:</p>
<p><strong>&#8220;The design of these avatars [Kuniavsky's term for objects that are closely tied to services] is quite challenging. They can&#8217;t really be as personalized. You just can&#8217;t pimp your City Carshare car. You only get one kind of bike in the Call a Bike program. That&#8217;s an important problem to solve. We love to have our stuff be ours. However, the same technologies can bring that, too. Our key fob can bring our whole world with us, and whether we sit down in a minivan, on a chair or in a plane we can bring our world with us. The thing can become our preferred colors, with our favorite music, and a picture of our loved ones on the dashboard, desk, or wall. Is it the same thing as owning it and leaving your stuff in it? No, but it&#8217;s closer.&#8221;<br />
</strong></p>
<p>Moreover:</p>
<p><strong>.. objects have to change at a fundamental level. They have to be designed differently and they have to be described and discussed differently. The &#8220;owner&#8217;s&#8221; relationship to the object changes. The very idea of ownership changes. The solid object grows a dotted line that is filled-in as-needed, when-needed, and with the features that are needed. This is not the same thing as renting or co-ownership, its anytime/anywhere nature-enabled by the underlying technology makes these new service objects fundamentally new (Kuniavsky&#8217;s presentation at ETech).<br />
</strong><br />
Elizabeth Goodman&#8217;s brilliant presentation at ETech, <a id="eag1" title="Designing for Urban Green Space" href="http://en.oreilly.com/et2009/public/schedule/detail/5562" target="_blank">Designing for Urban Green Space,</a> discussed a study of urban green space volunteership as a way &#8220;to rethink urban green space as a spectrum of places with varying types of ownership and management.&#8221;Â  Mike began the conversation by citing Elizabeth&#8217;s work.</p>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/03/dhj5mk2g_178gdn22ngf_b.png"><img class="alignnone size-medium wp-image-3208" title="dhj5mk2g_178gdn22ngf_b" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/03/dhj5mk2g_178gdn22ngf_b-300x219.png" alt="dhj5mk2g_178gdn22ngf_b" width="300" height="219" /></a></p>
<p><em>Picture from <a href="http://en.oreilly.com/et2009/public/schedule/detail/5562" target="_blank">Elizabeth Goodman&#8217;s presentation</a>.</em></p>
<p><strong>Mike Kuniavsky:</strong> Well what I was saying [re participation], citing my wife Elizabeth Goodman&#8217;s work &#8230;She did all this work at Intel on people&#8217;s health practices and the issues [around] instrumenting people&#8217;s lives in order to produce behavioral change and the problems with that.</p>
<p>The question is how do you, sense to encourage, rather than sense to punish, when all the indicators are going down, like economic indicators, ecological indicators. They&#8217;re just not going to be going up perceptibly in a very long time. You don&#8217;t want to discourage people. The way to create behavioral change is not to essentially keep punishing people for the past. And so I don&#8217;t know if I have a good answer for this, but there is this entire kind of thinking about how do you encourage people to keep doing things even when the actual easy-to-measure indicators like the first order indicators are all pointing down. It&#8217;s the classic thing about how do you get people to stay fit even as they&#8217;re aging. They are never going to be as healthy as they were when they were 50 again.</p>
<p><strong>Usman Haque:</strong> I think you really hit on it when you said it&#8217;s not about the first order but about the second order measurements because that is exactly the kind of thing you want to change. It&#8217;s not that you want to stop it from falling because sometimes it&#8217;s impossible, you want to slow its rate.</p>
<p><strong>Mike Kuniavsky:</strong> Exactly. You want to slow the rate because at the bottom maybe you can start looking at the first order indicator. But you can&#8217;t look at the first order indicator while things are going to hell. And so you can just say it&#8217;s less bad than it would have been. And figuring out how to take the first order sensory data and turn it into this kind of second order data that might be helpful for actually creating behavioral change, because ultimately that&#8217;s what all of this is talking about.</p>
<p><strong>Tish Shute: </strong>This discussion about behavioral change wasn&#8217;t elaborated in your presentation was it?</p>
<p><strong>MK:</strong> I presented on essentially the combination of being able to identify individual objects and the idea of providing services as a way of creating things&#8230; the servicization of things &#8230;turning things into services is greatly accelerated by network technologies and the ability to track things, and how this leads to the potential of having fundamentally different relationships to the devices in our lives and to things like ownership.</p>
<p>Like we now have the technology to create objects that are essentially representatives of services &#8211; things like City Car Share. What you own is not a thing but a possibility space of a thing. This fundamentally changes the design challenges. I am pretty convinced that the way we should be using a lot of these technologies is to be shifting objects from ownership models to service models. We can do that but there are significant challenges with it. What is happening is that we have had the technology to do this for a while, but we haven&#8217;t been thinking about how to design these services. We haven&#8217;t been thinking about how to design what I call the avatars of these services &#8211; the physical objects that are the manifestation of them, like an ATM is the avatar of a banking service. It is useless without the banking service it is a representative of, essentially.</p>
<p>If you imagine this as an abstract idea, the ATM pokes out of [the service and into] a specific thing, but so do the bank tellers and so does the web site.</p>
<p><strong>TS:</strong> It seems like this is a major shift in how we conceptualize our economy, culture and even government &#8211; what are the avatars of government?</p>
<p><strong>MK:</strong> I think change in government is very hard. The example I have been using is the light bulb. Start by solving a problem. The interesting thing about lightbulbs is that it was not the invention of an incandescent filament that glowed in a vacuum&#8211;that had been invented long before&#8211;it was the system that it was part of. And that it was part of a much larger design project that was created specifically for delivering the service of light to lower Manhattan in 1884.</p>
<p><strong>TS:</strong> The grid hasn&#8217;t changed since Edison right &#8211; one of the earlier speakers mentioned this, that if Edison came back now he would say, &#8220;the grid is where I left it.&#8221;</p>
<p><strong>MK:</strong> My point is that he wasn&#8217;t creating an abstract electrical grid, he was solving a problem by creating a system that had as its avatar &#8211; as its end point this bulb. But the bulb is actually not the system, it is merely the end point.</p>
<p>As we are thinking about the capabilities of these technologies my argument is we have to be designing service systems along that model.</p>
<p><strong>TS:</strong> Web services?</p>
<p><strong>MK:</strong> Not just designing Web services. I am a big fan of thinking about digital tools outside the context of general purpose computing devices. I consider laptops general purpose and I consider phones general purpose. Yes originally the handset started out just as a phone but now it is essentially a computer terminal and now you have netbooks and netbooks are essentially this halfway point between a phone and a laptop because now you are going to get net books with G3 cards. Essentially it is already a big phone. Those are general purpose computing platforms, and I am not very interested in those.</p>
<p><strong>TS:</strong> What motivated you to make that move in your thinking?</p>
<p><strong>MK:</strong> I thought it was a very narrow kind of thinking. I thought that the costs of computing represented by the technologies in the middle of the Moore&#8217;s Law curve &#8211; rather than on the right &#8211; that the cost of that had dropped so far that it seemed we could be making all kinds of devices that had information processing as part of what it is without being general purpose computing platforms.</p>
<p>The ipod is a good example. The ipod is a computer and you can run linux on it. It has more computing power than a computer did in the seventies. But who cares? The point of it is that you are using that power to solve a problem. You are applying the capabilities of information processing to solve specific problems. I have actually worked on infrastructural stuff. Twenty years ago I was associated with some early distributed computing stuff, then I did ten years of web site design stuff, but I am essentially done with that. Because what I am really interested in is creating new kinds of tools, new classes of tools that use information processing as the core of what makes them interesting and valuable.</p>
<p><strong>TS:</strong> Do these tools have to leverage networks to be useful?</p>
<p><strong>MK: </strong> No I think it is possible to use information processing in a small scale without having to be online all the time. That is another one of the big toolboxes. It creates a deep shift in the capabilities of what you can do if you have a network. But the network can be really, really low bandwidth and simple for it still to be useful. You get these things that wake up once a month and spit out a packet with their telemetry. And they are incredibly valuable but they are not what you would normally consider to be an always-on device. It changes what they can do very fundamentally. But it is not this thing that requires there to be blanket wifi.</p>
<p>You can have devices out there and this is sort of a cliched example but the guy riding a bicycle around with a wifi access point in a rural area where you have no infrastructure to do it otherwise. But you have a little computer in every area and as he rides by they will exchange some data.</p>
<p><strong>You don&#8217;t have to have fibre at the curb to really, really make interesting deeply socially effective technological interventions in the world. </strong></p>
<p><strong><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/03/aaaroncopetodekurtmikekuniavskypost.jpg"><img class="alignnone size-medium wp-image-3210" title="aaaroncopetodekurtmikekuniavskypost" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/03/aaaroncopetodekurtmikekuniavskypost-300x199.jpg" alt="aaaroncopetodekurtmikekuniavskypost" width="300" height="199" /></a></strong></p>
<p><em><a id="d3_j" title="Aaron Starup Cope," href="http://en.oreilly.com/where2009/public/schedule/speaker/43824" target="_blank">Aaron Straup Cope,</a> Flickr, Tod E. Kurt, and Mike Kuniavsky &#8211; discussing <a id="rzgd" title="The Shape of Alpha" href="http://en.oreilly.com/where2009/public/schedule/detail/7212" target="_blank">The Shape of Alpha</a> (more on this upcoming!)<strong><br />
</strong></em><br />
<strong>MK:</strong> What we are trying to do is to do that. We make a BlinkM &#8211; we make hardware &#8211; you saw my business partner Tod E. Kurt, he does all the heavy engineering and I am the guy who waves his hands around a lot and sends faxes. Our first product, which came out a year ago, was a smart LED. It is a very simple RGB LED; it has a microcontroller and the microcontroller has firmware on it that kind of abstracts out the complexity of incorporating LEDs into a hobbyist product. So you can do arbitrary colors, so it can do smooth fades between any two points in RGB space, you don&#8217;t need to know anything about Pulse Width Modulation or even microcontrollers. You don&#8217;t have to know anything about anything except a little bit about electricity to use the thing. [In addition to <a id="hy-z" title="Blinkm" href="http://thingm.com/products/blinkm.html" target="_blank">BlinkM</a>, <a id="g8y3" title="Blinkm Maxm" href="http://thingm.com/products/blinkm-maxm.html" target="_blank">BlinkM MaxM</a> &#8211; the smart LED, Thingm has developed prototypes for other products such as the <a id="hqwc" title="Winem" href="http://thingm.com/products/winem.html" target="_blank">WineM</a> RFID wine rack and <a href="http://thingm.com/sketches/lovem.html" target="_blank">LoveM LCD chocolate box</a>.]</p>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/03/dhj5mk2g_174cf26bcgn_b.png"><img class="alignnone size-medium wp-image-3211" title="dhj5mk2g_174cf26bcgn_b" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/03/dhj5mk2g_174cf26bcgn_b-224x300.png" alt="dhj5mk2g_174cf26bcgn_b" width="224" height="300" /></a></p>
<p><strong>TS:</strong> I made a <a href="http://www.arduino.cc/en/Main/ArduinoBoardLilyPad" target="_blank">LilyPad</a> enabled Tshirt yesterday, if I used your LED what difference would that make to my Tshirt?</p>
<p><strong>MK:</strong> You could have the LED without changing the circuit at all, you could have it blink in any pattern, be any color, fade between colors. With our new one which is bigger than the old one, we actually have inputs. You could stick a wire on it or weave it into your shirt, and when you touch the wire it would change the behaviour of the LED.</p>
<p><strong>TS:</strong> Nice, you are giving me even more incentive to finish my T-Shirt. I noticed that Tim O&#8217;Reilly was connecting you to Gavin Starks, CEO of AMEE just now, and Usman Haque of Pachube. What is the connection between your work on ThingM and these projects?</p>
<p><strong>MK:</strong> I think what Gavin&#8217;s doing, as I understand it from Tim, he is essentially creating this new kind of sensor network that monitors electrical usage and allows you to feed it back. What that does is create a new kind of data in the data shadow of your house, your refrigerator or whatever. It suddenly grows this extra lobe out in the data world that then has these new capabilities that can be attached to.</p>
<p><strong>TS: </strong>In terms of what you do with ThingM how are these ideas expressed through BlinkM?</p>
<p><strong>MK:</strong> We&#8217;re still building stuff that&#8217;s on a slightly lower level, components. Our corporate goal this year is to make our first product, a stand alone solution to something. One of the easiest things you can do with our technology right now is you can replicate an Ambient Orb in about ten minutes. You could tie into their work. But you could also tie into it in a more subtle way where you could make lights smart so that when the net electricity cost goes above a certain threshold the lights know to dim or to turn off. And that can be dependent on how people use them. So rather than having a light you essentially associate a function or purpose with a light. Then the light knows based on electricity usage when its purpose has high enough priority to be on.</p>
<p>Not all of these ideas pour into our products, we can only afford to make LEDs.</p>
<p><strong>TS:</strong> Still it is amazing how ThingM really is a flagship for what is a big and important shift in the way we can relate to stuff. And what about Usman&#8217;s Pachube? Where does ThingM fit with that?</p>
<p><strong>MK:</strong> I see Pachube less as a monolithic service than as a standard for device communication. Essentially it&#8217;s a proposal for interdevice communication, and potentially an easy way for people to define the way devices behave within their own personal ecology of smart devices. It&#8217;s something that&#8217;s in the early stages, and I think the barriers are not technological, the barriers are social. The barriers are understanding what this is for and why to use it. It&#8217;s not about will it work. It&#8217;ll work.</p>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/03/dhj5mk2g_177pc5g76g5_b.png"><img class="alignnone size-medium wp-image-3213" title="dhj5mk2g_177pc5g76g5_b" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/03/dhj5mk2g_177pc5g76g5_b-300x230.png" alt="dhj5mk2g_177pc5g76g5_b" width="300" height="230" /></a></p>
<p><em>Image from Mike&#8217;s ETech presentation &#8211; original image source: Yottamark</em></p>
<p><strong>&#8220;You can, hypothetically, look at any object and know where it was made, what it is made of, what your friends think of it, how much it sells for on Ebay, how to cook it, how to fix it, how to recycle it, whatever. Any information that&#8217;s available about an object can now be available immediately and associated with that object.&#8221; </strong></p>
<p><strong><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/03/dhj5mk2g_179fkxx3bg9_b.png"><img class="alignnone size-medium wp-image-3214" title="dhj5mk2g_179fkxx3bg9_b" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/03/dhj5mk2g_179fkxx3bg9_b-300x231.png" alt="dhj5mk2g_179fkxx3bg9_b" width="300" height="231" /></a></strong></p>
<p><strong>&#8220;Connect it with location information and you have Location Based Services for anything. This is Cabspotting by Stamen. As Tom Coates says, once we have a handle, you can throw the data around.&#8221; (Kuniavsky)</strong></p>
<p>More to come on Stamen Design later! <a href="http://en.oreilly.com/public/schedule/speaker/2156">Tom Carden</a> (Stamen Design) ran a workshop at ETech 2008, <a id="bcqk" title="&quot;Live, Vast and Deep: Web-native Information Visualization,&quot;" href="http://en.oreilly.com/et2008/public/schedule/detail/1585" target="_blank">&#8220;Live, Vast and Deep: Web-native Information Visualization,&#8221;</a> outlining the process of taking a real data set from an online <span class="caps">API</span> (such as <a href="http://flickr.com/services/api">Flickr</a> or <a href="http://dopplr.pbwiki.com/">Dopplr</a>) and shaping it into an informative, beautiful, and useful interactive graphic presentation and this year, <a href="http://en.oreilly.com/et2009/public/schedule/speaker/3486">Michal Migurski</a> (Stamen Design),  	 	<a href="http://en.oreilly.com/et2009/public/schedule/speaker/40013">Shawn Allen</a> (Stamen Design) gave a workshop on <a id="nbzw" title="&quot;Maps from Scratch: Online Maps from the Ground Up.&quot;" href="http://en.oreilly.com/et2009/public/schedule/detail/5555" target="_blank">&#8220;Maps from Scratch: Online Maps from the Ground Up.&#8221;</a> <a id="k6oi" title="Eric Rodenbeck" href="http://en.oreilly.com/et2009/public/schedule/speaker/2160" target="_blank">Eric Rodenbeck</a>, founder and creative director of Stamen Design, presented on, <a id="q4up" title="&quot;New Data Visualization: Reaching Through Maps.&quot;" href="http://en.oreilly.com/et2009/public/schedule/detail/5438" target="_blank">&#8220;New Data Visualization: Reaching Through Maps.&#8221;</a></p>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/03/dhj5mk2g_180g6zstxc4_b.jpg"></a><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/03/ercirodenbeckandshawnallenpost.jpg"><img class="alignnone size-medium wp-image-3279" title="ercirodenbeckandshawnallenpost" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/03/ercirodenbeckandshawnallenpost-300x199.jpg" alt="ercirodenbeckandshawnallenpost" width="300" height="199" /></a></p>
<p><em>The picture above is of Eric Rodenbeck and Shawn Allen playing Bocci.</em></p>
]]></content:encoded>
			<wfw:commentRss>https://www.ugotrade.com/2009/03/18/dematerializing-the-world-shadows-subscriptions-and-things-as-services-talking-with-mike-kuniavsky-at-etech-2009/feed/</wfw:commentRss>
		<slash:comments>16</slash:comments>
		</item>
	</channel>
</rss>
